Spaces:
Sleeping
Sleeping
File size: 3,787 Bytes
942f8b0 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 |
#!/usr/bin/env python3
"""
Script to check the current model configuration
"""
import os
import sys
from pathlib import Path
def check_environment_variables():
    """Print the status of each model-related environment variable.

    Reads a fixed list of variable names from the process environment and
    prints one line per variable. Values of API keys/tokens are masked so
    the output is safe to paste into logs or issues. Returns None.
    """
    print("🔍 Checking Environment Variables")
    print("=" * 40)

    # Check for model-related environment variables
    model_vars = [
        'DEFAULT_MODEL',
        'HUGGINGFACE_API_KEY',
        # NOTE(review): looks like a typo for HUGGINGFACE_API_KEY_2 — kept
        # as-is in case the deployment really uses this name; verify.
        'HUGGINGFAC_API_KEY_2',
        'HF_TOKEN',
        'MODEL_NAME',
    ]

    for var in model_vars:
        value = os.getenv(var)
        if not value:
            print(f"❌ {var}: Not set")
            continue
        if 'API_KEY' in var or 'TOKEN' in var:
            # Mask secrets: show only the first/last 10 chars of long
            # values; anything short enough to be fully revealed by that
            # scheme is replaced wholesale.
            if len(value) > 20:
                masked_value = value[:10] + "..." + value[-10:]
            else:
                masked_value = "***"
            print(f"✅ {var}: {masked_value}")
        else:
            print(f"✅ {var}: {value}")
def check_app_files():
    """Report which known app entry-point files exist in the CWD.

    For each file that exists, also prints the first line containing a
    DEFAULT_MODEL assignment, if any, so the active model configuration
    is visible at a glance. Returns None.
    """
    print("\n📁 Checking App Files")
    print("=" * 40)

    app_files = [
        "app.py",
        "app_backup.py",
        "api_server.py",
        "web_app.py",
    ]

    for app_file in app_files:
        path = Path(app_file)
        if not path.exists():
            print(f"❌ {app_file}: Not found")
            continue
        print(f"✅ {app_file}: Exists")
        try:
            content = path.read_text(encoding='utf-8')
        except Exception as e:
            print(f"   Error reading file: {e}")
            continue
        # First-hit-wins: only the first DEFAULT_MODEL assignment is shown.
        for line in content.split('\n'):
            if 'DEFAULT_MODEL' in line and '=' in line:
                print(f"   Model config: {line.strip()}")
                break
def check_huggingface_models():
    """Probe a few Hugging Face model ids with the configured API key.

    Requires the huggingface_hub package and an API key in either the
    HUGGINGFACE_API_KEY or HF_TOKEN environment variable; prints a
    diagnostic and returns early if either is missing. Prints one status
    line per model. Returns None.
    """
    print("\n🤖 Checking Hugging Face Model Availability")
    print("=" * 40)

    # Narrow try: only the import can raise ImportError.
    try:
        from huggingface_hub import InferenceClient
    except ImportError:
        print("❌ huggingface_hub not installed")
        return

    api_key = os.getenv('HUGGINGFACE_API_KEY') or os.getenv('HF_TOKEN')
    if not api_key:
        print("❌ No Hugging Face API key found")
        return

    test_models = [
        "meta-llama/Llama-3.2-1B-Instruct",
        "meta-llama/Llama-3.1-8B-Instruct",
        "gpt2",
        "distilgpt2",
    ]
    try:
        for model in test_models:
            try:
                # NOTE(review): constructing a client does not contact the
                # API, so this only proves the constructor accepts the model
                # id — it does not verify the model is actually served.
                InferenceClient(token=api_key, model=model)
                print(f"✅ {model}: Available")
            except Exception as e:
                print(f"❌ {model}: Not available - {str(e)[:100]}...")
    except Exception as e:
        # Catch-all boundary so one unexpected failure doesn't kill the
        # whole diagnostic run.
        print(f"❌ Error checking Hugging Face models: {e}")
def main():
    """Run every configuration check in order, then print a summary."""
    print("🔧 Model Configuration Checker")
    print("=" * 50)

    check_environment_variables()
    check_app_files()
    check_huggingface_models()

    print("\n📋 Summary:")
    print("- Check the environment variables above")
    print("- Verify which app file is actually running")
    print("- Ensure the model is compatible with Hugging Face Inference API")


if __name__ == "__main__":
    main()
|