Update app.py
Browse files
app.py
CHANGED
@@ -26,14 +26,34 @@ def load_llm():
|
|
26 |
)
|
27 |
return pipeline("text-generation", model=mod, tokenizer=tok, max_new_tokens=512)
|
28 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
29 |
# --- Basic Agent Definition ---
|
30 |
# ----- THIS IS WERE YOU CAN BUILD WHAT YOU WANT ------
|
31 |
class BasicAgent:
|
32 |
def __init__(self):
|
33 |
print("BasicAgent initialized.")
|
34 |
|
35 |
-
print("ENV-HF_TOKEN-LEN", len(
|
36 |
print(smolagents.__version__)
|
|
|
37 |
|
38 |
# Local test
|
39 |
# client = InferenceClient(
|
|
|
26 |
)
|
27 |
return pipeline("text-generation", model=mod, tokenizer=tok, max_new_tokens=512)
|
28 |
|
29 |
def check_token_access():
    """Best-effort startup probe: check that HF_TOKEN can reach a gated model.

    Reads the token from the ``HF_TOKEN`` environment variable and tries to
    fetch the config of a known gated repo. Every outcome is only printed —
    the function never raises (network errors are reported, not propagated),
    so it is safe to call unconditionally during app startup.
    """
    token = os.environ.get("HF_TOKEN", "")
    if not token:
        print("❌ No token found")
        return
    headers = {"Authorization": f"Bearer {token}"}
    # A 200 on this gated file proves the token was granted access to the repo.
    url = "https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct/resolve/main/config.json"
    try:
        r = requests.get(url, headers=headers, timeout=10)
        print(f"🔍 Token test response: {r.status_code}")
        if r.status_code == 200:
            print("✅ Token access confirmed for gated model.")
        elif r.status_code == 401:
            # Previously fell through to "Unexpected status"; 401 means the
            # token itself is invalid/expired, which deserves a clear message.
            print("❌ 401 Unauthorized: Token is invalid or expired.")
        elif r.status_code == 403:
            print("❌ 403 Forbidden: Token does not have access.")
        else:
            print("⚠️ Unexpected status:", r.status_code)
    except Exception as e:
        # Deliberate broad catch: this is a diagnostic, not a gatekeeper.
        print("❌ Token check failed:", e)
48 |
# --- Basic Agent Definition ---
|
49 |
# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
|
50 |
class BasicAgent:
    """Minimal agent stub; construction only emits startup diagnostics."""

    def __init__(self):
        print("BasicAgent initialized.")

        # Debug aid: report the token's LENGTH (never the token itself) on
        # stderr. Fix: the original referenced a global `hf_token` that is not
        # defined anywhere in this file's visible code (NameError at
        # construction); read the environment directly, matching how
        # check_token_access obtains the token.
        print("ENV-HF_TOKEN-LEN", len(os.environ.get("HF_TOKEN", "")), file=sys.stderr)
        print(smolagents.__version__)
        check_token_access()
57 |
|
58 |
# Local test
|
59 |
# client = InferenceClient(
|