Spaces:
Running
Running
File size: 2,790 Bytes
892f06a f3f0a69 892f06a f3f0a69 56325dc 892f06a f3f0a69 892f06a f3f0a69 892f06a 56325dc 892f06a 4f034fb 892f06a 56325dc edfcf73 8f771eb 892f06a cca9b28 edfcf73 892f06a edfcf73 892f06a edfcf73 4f034fb edfcf73 8f771eb 1131989 edfcf73 8f771eb 892f06a 4f034fb 892f06a 4f034fb 1131989 892f06a 4f034fb edfcf73 4f034fb edfcf73 1131989 892f06a 1131989 8f771eb 892f06a 4f034fb 19ea0c5 1131989 4f034fb |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 |
# === Imports ===
import os
import time
import requests
from dotenv import load_dotenv
from supabase import create_client
from sentence_transformers import SentenceTransformer
from openai import OpenAI
# === Load Environment Variables ===
# Pulls SUPABASE_API_KEY and HF_API_TOKEN from a local .env file, if present.
load_dotenv()

# === Supabase Configuration ===
SUPABASE_URL = "https://lmpazoxzucnlqqxjoihi.supabase.co"
SUPABASE_KEY = os.getenv("SUPABASE_API_KEY")
if not SUPABASE_KEY:
    # Fixed: message now names the env var actually read above
    # (previously said "SUPABASE_KEY", sending users to set the wrong variable).
    raise ValueError("SUPABASE_API_KEY is not set in the environment variables.")
supabase = create_client(SUPABASE_URL, SUPABASE_KEY)

# === Embedding Model for Scoring ===
# Loaded once at import time; downloads weights on first run.
embedding_model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

# === Hugging Face API Configuration ===
HF_API_TOKEN = os.getenv("HF_API_TOKEN")
if not HF_API_TOKEN:
    raise ValueError("Missing Hugging Face API key. Check your .env file.")
HF_HEADERS = {"Authorization": f"Bearer {HF_API_TOKEN}"}

# === Hugging Face Model Endpoints ===
# Maps a short model key to either a full inference URL (pegasus) or a model
# name understood by the OpenAI-compatible endpoint (gemma).
HF_MODELS = {
    "pegasus": "https://router.huggingface.co/hf-inference/models/google/pegasus-xsum",
    "gemma": "tgi"  # Used as the model name with OpenAI-compatible client
}

# === OpenAI-Compatible Client (for Gemma) ===
client = OpenAI(
    base_url="https://vzwjawyxvu030jsw.us-east-1.aws.endpoints.huggingface.cloud/v1/",
    api_key=HF_API_TOKEN,
)
# === Optional: General Query Helper (for non-chat models like pegasus) ===
def query(payload, model="pegasus", retries=5, delay=5):
    """
    Send a request to the Hugging Face API with retries and error handling.

    Args:
        payload: JSON-serializable request body for the model endpoint.
        model: Key into HF_MODELS selecting the endpoint URL.
        retries: Maximum number of attempts before giving up.
        delay: Seconds to sleep between retry attempts.

    Returns:
        The decoded JSON response on success, or None on non-retryable
        errors (401/402) or after all retries are exhausted.

    Raises:
        ValueError: If `model` is not a key of HF_MODELS.
    """
    if model not in HF_MODELS:
        raise ValueError(f"Invalid model name: {model}. Available: {list(HF_MODELS.keys())}")
    api_url = HF_MODELS[model]
    for attempt in range(retries):
        try:
            response = requests.post(api_url, headers=HF_HEADERS, json=payload, timeout=10)
            # Non-retryable client errors: report and bail out immediately.
            # (Messages were mojibake-prefixed; replaced with plain ASCII.)
            if response.status_code == 401:
                print("Unauthorized (401). Check HF_API_TOKEN.")
                return None
            if response.status_code == 402:
                print("Payment Required (402). Free tier may not support this model.")
                return None
            # Transient server errors: wait and retry.
            if response.status_code in (500, 503):
                print(f"Server error ({response.status_code}) on attempt {attempt + 1}. Retrying in {delay}s...")
                time.sleep(delay)
                continue
            response.raise_for_status()
            return response.json()
        except requests.exceptions.Timeout:
            print(f"Timeout on attempt {attempt + 1}. Retrying in {delay}s...")
            time.sleep(delay)
        except requests.exceptions.RequestException as e:
            print(f"Request failed: {e}")
            time.sleep(delay)
    print("All retry attempts failed.")
    return None