# MCPclient / app.py
# Source: Hugging Face Space by ABDALLALSWAITI — "Update app.py" (commit 87bb6a9, verified)
import gradio as gr
import os
from mcp import StdioServerParameters
from smolagents import InferenceClientModel, CodeAgent, ToolCollection, MCPClient
from smolagents import LiteLLMModel
# --- Configuration ---
# A Gemini API key must be exported as GOOGLE_API_KEY before launching.
# Keys are issued by Google AI Studio: https://aistudio.google.com/app/apikey
API_KEY = os.environ.get("GOOGLE_API_KEY")

# Public SSE endpoint of the MCP server this client talks to, derived from
# the Space name: https://huggingface.co/spaces/Agents-MCP-Hackathon/HuggingFaceDoc
MCP_SERVER_URL = "https://agents-mcp-hackathon-huggingfacedoc.hf.space/gradio_api/mcp/sse"

# Fail fast with an actionable message when the key is absent.
if not API_KEY:
    raise ValueError("GOOGLE_API_KEY environment variable not set. Please set your API key to run this app.")
# --- Main Application ---
# Connect to the remote MCP tool server, wire its tools into a CodeAgent
# backed by Gemini (via LiteLLM), and serve a Gradio chat UI. The finally
# clause guarantees the MCP connection is torn down on any exit path.
# NOTE: the original file's emoji were mojibake (UTF-8 decoded as
# Windows-1253); the intended characters are restored below.
try:
    print(f"🔌 Connecting to MCP Server: {MCP_SERVER_URL}")
    mcp_client = MCPClient(
        {"url": MCP_SERVER_URL}
    )
    tools = mcp_client.get_tools()
    print(f"✅ Successfully connected. Found {len(tools)} tools.")

    # model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
    # Gemini is reached through LiteLLM rather than the HF inference client.
    model = LiteLLMModel(
        model_id="gemini/gemini-1.5-flash",
        temperature=0.2,
        api_key=API_KEY,
    )

    # The CodeAgent is effective at using tools.
    agent = CodeAgent(tools=list(tools), model=model)

    def respond(message, history):
        """Run the agent on one chat turn; history is unused by the agent."""
        return str(agent.run(message))

    # Create the Gradio ChatInterface.
    demo = gr.ChatInterface(
        fn=respond,
        title="📚 Hugging Face Research Agent",
        description="This agent uses the Hugging Face Information Server to answer questions about models, datasets, and documentation.",
        examples=[
            "What is a Hugging Face pipeline?",
            "Find 3 popular models for text classification",
            "Get the info for the 'squad' dataset",
            "What is PEFT?",
        ],
    )
    demo.launch()
finally:
    # Ensure the connection is closed when the app stops. The locals() guard
    # covers the case where MCPClient() itself raised before assignment.
    if 'mcp_client' in locals() and mcp_client.is_connected:
        print("🔌 Disconnecting from MCP Server...")
        mcp_client.disconnect()