Spaces:
Sleeping
Sleeping
File size: 2,178 Bytes
b967600 87bb6a9 b967600 87bb6a9 b967600 87bb6a9 b967600 87bb6a9 b967600 87bb6a9 b967600 80ea69c 87bb6a9 80ea69c 87bb6a9 80ea69c 87bb6a9 80ea69c 87bb6a9 b967600 87bb6a9 b967600 87bb6a9 b967600 87bb6a9 b967600 87bb6a9 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 |
import gradio as gr
import os
from mcp import StdioServerParameters
from smolagents import InferenceClientModel, CodeAgent, ToolCollection, MCPClient
from smolagents import LiteLLMModel
# --- Configuration ---
# Ensure you have your GOOGLE_API_KEY set as an environment variable.
# You can get one from Google AI Studio: https://aistudio.google.com/app/apikey
API_KEY = os.getenv("GOOGLE_API_KEY")

# Public URL of the MCP server this app talks to, derived from the Space name:
# https://huggingface.co/spaces/Agents-MCP-Hackathon/HuggingFaceDoc
MCP_SERVER_URL = "https://agents-mcp-hackathon-huggingfacedoc.hf.space/gradio_api/mcp/sse"

# Fail fast: the Gemini model below cannot be constructed without a key.
if not API_KEY:
    raise ValueError("GOOGLE_API_KEY environment variable not set. Please set your API key to run this app.")
# --- Main Application ---
# Connect to the MCP server, build a tool-using agent, and serve a chat UI.
# The finally-block guarantees the MCP connection is closed on shutdown.
try:
    print(f"Connecting to MCP Server: {MCP_SERVER_URL}")
    mcp_client = MCPClient(
        {"url": MCP_SERVER_URL}
    )
    # Fetch the tool definitions exposed by the remote MCP server.
    tools = mcp_client.get_tools()
    print(f"Successfully connected. Found {len(tools)} tools.")

    # model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
    # We use LiteLLM to connect to the Gemini API.
    model = LiteLLMModel(
        model_id="gemini/gemini-1.5-flash",
        temperature=0.2,  # low temperature for more deterministic tool use
        api_key=API_KEY,
    )

    # The CodeAgent is effective at using tools.
    agent = CodeAgent(tools=[*tools], model=model)

    # Create the Gradio ChatInterface; each message triggers one agent run.
    # NOTE(review): history is intentionally ignored — each turn is stateless.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        title="Hugging Face Research Agent",
        description="This agent uses the Hugging Face Information Server to answer questions about models, datasets, and documentation.",
        examples=[
            "What is a Hugging Face pipeline?",
            "Find 3 popular models for text classification",
            "Get the info for the 'squad' dataset",
            "What is PEFT?",
        ],
    )
    demo.launch()
finally:
    # Ensure the connection is closed when the app stops (even on error,
    # and even if the failure happened before mcp_client was assigned).
    if 'mcp_client' in locals() and mcp_client.is_connected:
        print("Disconnecting from MCP Server...")
        mcp_client.disconnect()