import os
import gradio as gr
import requests
import inspect
import pandas as pd
#import smolagents  # to test
from smolagents import CodeAgent, InferenceClientModel, load_tool, tool  # DuckDuckGoSearchTool,
from huggingface_hub import InferenceClient
import json

from final_answer import FinalAnswerTool
from visit_webpage import VisitWebpageTool
from web_search import web_search_DuckDuckGoSearchTool
from wikipediaLookup import WikipediaLookupTool
api_url = "https://agents-course-unit4-scoring.hf.space"
questions_url = f"{api_url}/questions"
submit_url = f"{api_url}/submit"
class BasicAgent:
    """Placeholder agent from the course template; returns a fixed answer and is not used below."""

    def __init__(self):
        print("BasicAgent initialized.")

    def __call__(self, question: str) -> str:
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        fixed_answer = "This is a default answer."
        print(f"Agent returning fixed answer: {fixed_answer}")
        return fixed_answer
def load_questions_from_file(filepath="questions.json"):
    try:
        with open(filepath, "r", encoding="utf-8") as f:
            questions_data = json.load(f)
        if not questions_data:
            print("Loaded file is empty.")
            return "Loaded file is empty.", None
        print(f"Loaded {len(questions_data)} questions from file.")
        return "Loaded questions successfully.", questions_data
    except FileNotFoundError:
        print("File not found. Please run the API fetch first.")
        return "File not found.", None
    except json.JSONDecodeError as e:
        print(f"Error decoding JSON: {e}")
        return f"Error decoding JSON: {e}", None
    except Exception as e:
        print(f"Unexpected error: {e}")
        return f"Unexpected error: {e}", None
# Set up
# Token
# Model
model = InferenceClientModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # it is possible that this model may be overloaded
    custom_role_conversions=None,
)
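
# The "Token" placeholder above could be wired up like this (a sketch, assuming the token
# is exposed to the Space as an HF_TOKEN secret/environment variable and that
# InferenceClientModel accepts an explicit token argument):
# hf_token = os.getenv("HF_TOKEN")
# model = InferenceClientModel(
#     max_tokens=2096,
#     temperature=0.5,
#     model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
#     custom_role_conversions=None,
#     token=hf_token,
# )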
# Tools
final_answer = FinalAnswerTool()
#duckDuckGoSearch = DuckDuckGoSearchTool()  # smolagents version
visitWebpage = VisitWebpageTool()
wikipediaLookup = WikipediaLookupTool()
webSearch = web_search_DuckDuckGoSearchTool()
# Agent
agent_codeagent = CodeAgent(
    model=model,
    tools=[final_answer, wikipediaLookup, visitWebpage, webSearch],  # add your tools here (don't remove final_answer); duckDuckGoSearch,
    max_steps=3,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    #prompt_templates=prompt_templates
)
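
# Optional quick smoke test for the agent outside of Gradio (a sketch; the example
# question is arbitrary and not part of the grading set):
# print(agent_codeagent.run("What is the capital of France?"))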
# Gradio handler that runs the agent on the first cached question
def run_once(state):
    if state is not None:
        return "Already run once. Refresh to rerun.", state

    status_message, questions_data = load_questions_from_file()
    if questions_data is None or len(questions_data) == 0:
        return "No questions found or failed to load.", None

    question = questions_data[0]
    question_text = question["question"]
    task_id = question["task_id"]
    print(f"\nTask ID: {task_id}")
    print(f"Question: {question_text}")

    try:
        answer = agent_codeagent.run(question_text)  # .run() is the standard smolagents entry point
        output = f"Answer to task {task_id}:\n{answer}"
        return output, output
    except Exception as e:
        return f"Error running agent: {e}", None
# Create Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("## Run AI Agent Once")
    output_text = gr.Textbox(label="Agent Output", lines=10)
    run_button = gr.Button("Run Agent")
    state = gr.State()  # cache variable to prevent re-runs

    run_button.click(fn=run_once, inputs=state, outputs=[output_text, state])

# Launch the interface
demo.launch()