krrishk22 commited on
Commit
fada4c5
·
verified ·
1 Parent(s): 494d9f8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +50 -27
app.py CHANGED
@@ -226,41 +226,64 @@ final_answer = FinalAnswerTool()
226
  # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
227
  # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
228
 
229
- model = HfApiModel(
230
- max_tokens=2096,
231
- temperature=0.5,
232
- model_id='sarvamai/sarvam-m',# it is possible that this model may be overloaded
233
- )
234
-
235
 
 
 
 
 
 
 
236
 
237
- # Import tool from Hub
238
- image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
 
 
 
239
 
240
- with open("prompts.yaml", 'r') as stream:
241
- prompt_templates = yaml.safe_load(stream)
242
-
243
-
244
- agent = CodeAgent(
245
- model=model,
246
- tools=[final_answer,
247
- get_horoscope,
248
  get_date_panchang,
249
  get_holidays,
250
  get_panchang_field,
251
  get_festivals_today,
252
  get_current_time_in_timezone,
253
  my_custom_tool,
254
- image_generation_tool], ## add your tools here (don't remove final answer)
255
- max_steps=6,
256
- verbosity_level=1,
257
- grammar=None,
258
- planning_interval=None,
259
- name=None,
260
- description=None,
261
- prompt_templates=prompt_templates
262
-
263
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
264
 
 
 
 
 
 
 
265
 
266
- GradioUI(agent).launch()
 
 
226
  # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
227
  # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
228
 
 
 
 
 
 
 
229
 
230
# Model definitions.
# Both endpoints share the same generation settings; only the model id differs.
_SHARED_GENERATION_KWARGS = dict(max_tokens=2096, temperature=0.5)

# Primary model (may be overloaded at peak times).
primary_model = HfApiModel(
    model_id='sarvamai/sarvam-m',
    **_SHARED_GENERATION_KWARGS,
)

# Backup Hugging Face endpoint used when the primary fails.
backup_model = HfApiModel(
    model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud',
    **_SHARED_GENERATION_KWARGS,
)
242
 
243
# NOTE(review): this commit removed the `load_tool(...)` and `prompts.yaml`
# lines while `build_agent` below still references `image_generation_tool`
# and `prompt_templates` — restored here to avoid a NameError at startup.
# Confirm they are not already defined earlier in the file.
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)


# Function to Build an Agent with Any Model
def build_agent(model_to_use):
    """Build a CodeAgent wired to the app's full tool set.

    Args:
        model_to_use: model instance (e.g. an HfApiModel) the agent queries.

    Returns:
        A configured CodeAgent ready to ``.run()`` user queries.
    """
    return CodeAgent(
        model=model_to_use,
        tools=[
            final_answer,  # required: the agent must keep final_answer
            get_horoscope,
            get_date_panchang,
            get_holidays,
            get_panchang_field,
            get_festivals_today,
            get_current_time_in_timezone,
            my_custom_tool,
            image_generation_tool,
        ],
        max_steps=6,
        verbosity_level=1,
        grammar=None,
        planning_interval=None,
        name=None,
        description=None,
        prompt_templates=prompt_templates,
    )
266
+
267
# Default agent: built once at import time against the primary model.
agent = build_agent(model_to_use=primary_model)
269
+
270
# Fallback-Handled Runner Function
def agent_runner(user_input):
    """Run *user_input* through the primary agent, falling back to the backup.

    Args:
        user_input: the user's query string.

    Returns:
        The agent's answer, or a human-readable error string when both
        models fail. Never raises.
    """
    try:
        result = agent.run(user_input)
        # Treat None or a blank string as a failure. Non-string results
        # (agents may return numbers/objects) are accepted as-is — the
        # original unconditional result.strip() raised AttributeError on
        # those and falsely triggered the fallback.
        if result is None or (isinstance(result, str) and not result.strip()):
            raise ValueError("Primary model returned empty response.")
        return result
    except Exception as e:
        print(f"Primary model failed: {e}")
        print("Switching to backup model...")
        try:
            # Build the backup agent inside the try so a construction
            # failure is also reported instead of crashing the UI.
            backup_agent = build_agent(backup_model)
            result = backup_agent.run(user_input)
            return result or "Backup model also failed."
        except Exception as e2:
            return f"Backup model failed as well: {e2}"
287
 
288
# Launch Gradio with Fallback Logic
# NOTE(review): GradioUI is handed a plain callable here, not an agent
# instance — smolagents' stock GradioUI expects an agent object (with
# .run/.memory). Confirm this GradioUI implementation accepts a function,
# otherwise the fallback wrapper will break the UI.
GradioUI(agent_runner).launch()