keynes42 committed on
Commit
804ec12
·
verified ·
1 Parent(s): 115c388

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -4
app.py CHANGED
@@ -23,6 +23,7 @@ print(dir(_tools))
23
  # (Keep Constants as is)
24
  # --- Constants ---
25
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
 
26
 
27
  def check_token_access():
28
  token = os.environ.get("HF_TOKEN", "")
@@ -30,7 +31,8 @@ def check_token_access():
30
  print("❌ No token found")
31
  return
32
  headers = {"Authorization": f"Bearer {token}"}
33
- url = "https://huggingface.co/meta-llama/Llama-3.1-8B-Instruct/resolve/main/config.json"
 
34
  try:
35
  r = requests.get(url, headers=headers, timeout=10)
36
  print(f"🔍 Token test response: {r.status_code}")
@@ -70,13 +72,13 @@ class PreloadedPythonTool(PythonInterpreterTool):
70
  # --- Basic Model Definition ---
71
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
72
  class BasicModel:
73
- def __init__(self, model_id="meta-llama/Llama-3.1-8B-Instruct", hf_token=""):
74
  print("BasicAgent initialized.")
75
  print("ENV-HF_TOKEN-LEN", len(hf_token), file=sys.stderr)
76
  check_token_access()
77
 
78
  # Initialize the model
79
- # model = HfApiModel(model_id="meta-llama/Llama-3.1-70B-Instruct",
80
  # # format="text-generation",
81
  # token=os.environ["HF_TOKEN"],
82
  # max_tokens=2048,
@@ -181,7 +183,7 @@ def run_and_submit_all( profile: gr.OAuthProfile | None):
181
  # 1. Instantiate Agent ( modify this part to create your agent)
182
  try:
183
  # (1) Create the LLM wrapper
184
- llm_model = BasicModel(hf_token=hf_token)
185
  # (2) Create the tools
186
  wiki_tool = CachedWikiTool()
187
  search_tool = CachedWebSearchTool()
 
23
  # (Keep Constants as is)
24
  # --- Constants ---
25
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
26
+ MODEL_ID = "Qwen/Qwen3-32B"
27
 
28
  def check_token_access():
29
  token = os.environ.get("HF_TOKEN", "")
 
31
  print("❌ No token found")
32
  return
33
  headers = {"Authorization": f"Bearer {token}"}
34
+ url = f"https://huggingface.co/{MODEL_ID}/resolve/main/config.json"
35
+
36
  try:
37
  r = requests.get(url, headers=headers, timeout=10)
38
  print(f"🔍 Token test response: {r.status_code}")
 
72
  # --- Basic Model Definition ---
73
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
74
  class BasicModel:
75
+ def __init__(self, model_id, hf_token=""):
76
  print("BasicAgent initialized.")
77
  print("ENV-HF_TOKEN-LEN", len(hf_token), file=sys.stderr)
78
  check_token_access()
79
 
80
  # Initialize the model
81
+ # model = HfApiModel(model_id=model_id,
82
  # # format="text-generation",
83
  # token=os.environ["HF_TOKEN"],
84
  # max_tokens=2048,
 
183
  # 1. Instantiate Agent ( modify this part to create your agent)
184
  try:
185
  # (1) Create the LLM wrapper
186
+ llm_model = BasicModel(model_id=MODEL_ID, hf_token=hf_token)
187
  # (2) Create the tools
188
  wiki_tool = CachedWikiTool()
189
  search_tool = CachedWebSearchTool()