Spaces:
Running
Update app.py
Browse files
app.py
CHANGED
@@ -33,10 +33,18 @@ model_error = None
|
|
33 |
try:
|
34 |
print("Initializing model... This may take a moment.")
|
35 |
|
36 |
-
# Explicitly load the token from environment variables (for HF Spaces secrets)
|
37 |
# This makes the authentication more robust, overriding any bad default credentials.
|
38 |
-
hf_token = os.environ.get("HF_TOKEN"
|
39 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
40 |
# Using a smaller model from the user's list.
|
41 |
# Passing the token explicitly to ensure correct authentication.
|
42 |
generator = pipeline(
|
|
|
33 |
try:
|
34 |
print("Initializing model... This may take a moment.")
|
35 |
|
36 |
+
# Explicitly load the token from environment variables (for HF Spaces secrets).
|
37 |
# This makes the authentication more robust, overriding any bad default credentials.
|
38 |
+
hf_token = os.environ.get("HF_TOKEN")
|
39 |
+
|
40 |
+
# Add a check to see if the token was loaded correctly.
|
41 |
+
if hf_token:
|
42 |
+
print("✅ HF_TOKEN secret found.")
|
43 |
+
else:
|
44 |
+
print("⚠️ HF_TOKEN secret not found. Please ensure it is set in your Hugging Face Space settings.")
|
45 |
+
# Raise an error to stop the app from proceeding without a token.
|
46 |
+
raise ValueError("Hugging Face token not found. Please set the HF_TOKEN secret.")
|
47 |
+
|
48 |
# Using a smaller model from the user's list.
|
49 |
# Passing the token explicitly to ensure correct authentication.
|
50 |
generator = pipeline(
|