# TalentLensAI / config.py
# === Imports ===
import os
import time
import requests
from dotenv import load_dotenv
from supabase import create_client
from sentence_transformers import SentenceTransformer
from openai import OpenAI
# === Load Environment Variables ===
load_dotenv()
# === Supabase Configuration ===
SUPABASE_URL = "https://lmpazoxzucnlqqxjoihi.supabase.co"
SUPABASE_KEY = os.getenv("SUPABASE_API_KEY")
if not SUPABASE_KEY:
raise ValueError("SUPABASE_KEY is not set in the environment variables.")
supabase = create_client(SUPABASE_URL, SUPABASE_KEY)
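
# Illustrative sketch (assumption): a typical read through the shared Supabase
# client. The "resumes" table name is only a placeholder; the real table names
# live in the pages/utils modules that import this config.
def fetch_rows(table: str = "resumes", limit: int = 10):
    """Return up to `limit` rows from the given Supabase table."""
    return supabase.table(table).select("*").limit(limit).execute().data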
# === Embedding Model for Scoring ===
embedding_model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
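
# Illustrative sketch (assumption): how the shared embedding model is typically
# used for resume/job-description similarity scoring. The helper name is not
# part of this file's original API.
def similarity_score(text_a: str, text_b: str) -> float:
    """Cosine similarity between two texts using the shared MiniLM model."""
    from sentence_transformers import util
    emb_a = embedding_model.encode(text_a, convert_to_tensor=True)
    emb_b = embedding_model.encode(text_b, convert_to_tensor=True)
    return float(util.cos_sim(emb_a, emb_b).item())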
# === Hugging Face API Configuration (for summarization/other) ===
HF_API_TOKEN = os.getenv("HF_API_TOKEN")
if not HF_API_TOKEN:
raise ValueError("Missing Hugging Face API key. Check your .env file.")
HF_HEADERS = {"Authorization": f"Bearer {HF_API_TOKEN}"}
# === Hugging Face Model Endpoints ===
HF_MODELS = {
"pegasus": "https://router.huggingface.co/hf-inference/models/google/pegasus-xsum",
"gemma": "tgi" # Used as the model name with OpenAI-compatible client
}
# === OpenAI-Compatible Client (for Gemma) ===
client = OpenAI(
base_url="https://vzwjawyxvu030jsw.us-east-1.aws.endpoints.huggingface.cloud/v1/",
api_key=HF_API_TOKEN,
)
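
# Illustrative sketch (assumption): how the Gemma TGI endpoint is typically
# called through the OpenAI-compatible client. The prompt and parameters are
# placeholders, not the app's actual summarization prompt.
def gemma_chat(prompt: str, max_tokens: int = 256) -> str:
    """Send a single-turn chat request to the dedicated TGI endpoint."""
    completion = client.chat.completions.create(
        model=HF_MODELS["gemma"],  # TGI endpoints expect the literal model name "tgi"
        messages=[{"role": "user", "content": prompt}],
        max_tokens=max_tokens,
    )
    return completion.choices[0].message.content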
# === Optional: General Query Helper (for non-chat models like pegasus) ===
def query(payload, model="pegasus", retries=5, delay=5):
"""
Sends a request to the Hugging Face API with retries and error handling.
"""
if model not in HF_MODELS:
raise ValueError(f"Invalid model name: {model}. Available: {list(HF_MODELS.keys())}")
api_url = HF_MODELS[model]
for attempt in range(retries):
try:
response = requests.post(api_url, headers=HF_HEADERS, json=payload, timeout=10)
if response.status_code in (401, 402):
print(f"❌ HF error {response.status_code}")
return None
response.raise_for_status()
return response.json()
except requests.exceptions.RequestException as e:
print(f"⚠️ Attempt {attempt+1} failed: {e}")
time.sleep(delay)
print("🚨 All retry attempts failed.")
return None
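
# Example usage (assumption): summarizing text with the Pegasus endpoint.
# Guarded so importing this config module never triggers a network call.
if __name__ == "__main__":
    sample = "TalentLens parses resumes, scores them against job postings, and drafts summaries."
    result = query({"inputs": sample}, model="pegasus")
    if isinstance(result, list) and result:
        # hf-inference summarization models return a list of {"summary_text": ...} dicts
        print(result[0].get("summary_text", result[0]))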