# src/webui/components/agent_settings_tab.py
import json
import logging
import os
from functools import partial
from typing import Any, Dict, Optional

import gradio as gr
from gradio.components import Component

from src.utils import config
from src.webui.webui_manager import WebuiManager

logger = logging.getLogger(__name__)


async def update_mcp_server(mcp_file: str, webui_manager: WebuiManager):
    """
    Reload the MCP server configuration from the uploaded JSON file.

    Closes any running controller first so the new MCP config takes effect, then
    returns the pretty-printed JSON and a visibility update for the config textbox.
    """
    if hasattr(webui_manager, "bu_controller") and webui_manager.bu_controller:
        logger.warning("⚠️ Close controller because mcp file has changed!")
        await webui_manager.bu_controller.close_mcp_client()
        webui_manager.bu_controller = None

    if not mcp_file or not os.path.exists(mcp_file) or not mcp_file.endswith('.json'):
        logger.warning(f"{mcp_file} is not a valid MCP file.")
        return None, gr.update(visible=False)

    with open(mcp_file, 'r') as f:
        mcp_server = json.load(f)

    return json.dumps(mcp_server, indent=2), gr.update(visible=True)
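
# Illustrative only: update_mcp_server loads and pretty-prints whatever JSON the user
# uploads; it does not validate the schema here. An MCP config in the common
# "mcpServers" convention (an assumed example, not something this code requires)
# might look like:
#
#   {
#       "mcpServers": {
#           "filesystem": {
#               "command": "npx",
#               "args": ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"]
#           }
#       }
#   }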


def create_agent_settings_tab(webui_manager: WebuiManager):
    """
    Create the agent settings tab and register its components with the webui_manager.
    """
    input_components = set(webui_manager.get_components())
    tab_components = {}

    with gr.Group():
        with gr.Column():
            override_system_prompt = gr.Textbox(label="Override system prompt", lines=4, interactive=True)
            extend_system_prompt = gr.Textbox(label="Extend system prompt", lines=4, interactive=True)

    with gr.Group():
        mcp_json_file = gr.File(label="MCP server JSON", interactive=True, file_types=[".json"])
        mcp_server_config = gr.Textbox(label="MCP server", lines=6, interactive=True, visible=False)

    with gr.Group():
        with gr.Row():
            # Provider is fixed to OpenAI in this build
            llm_provider = gr.Dropdown(
                choices=["openai"],
                label="LLM Provider",
                value="openai",
                info="OpenAI is the only supported LLM provider",
                interactive=False
            )
            llm_model_name = gr.Dropdown(
                label="LLM Model Name",
                choices=config.model_names['openai'],
                value="gpt-4o",
                interactive=True,
                allow_custom_value=True,
                info="Select a model from the dropdown or type a custom model name"
            )
        with gr.Row():
            llm_temperature = gr.Slider(
                minimum=0.0,
                maximum=2.0,
                value=0.6,
                step=0.1,
                label="LLM Temperature",
                info="Controls randomness in model outputs",
                interactive=True
            )
            use_vision = gr.Checkbox(
                label="Use Vision",
                value=True,
                info="Enable vision (send highlighted screenshots to the LLM)",
                interactive=True
            )
        with gr.Row():
            llm_base_url = gr.Textbox(
                label="Base URL",
                value="",
                info="API endpoint URL (if required)"
            )
            llm_api_key = gr.Textbox(
                label="API Key",
                type="password",
                value="",
                info="Your API key (leave blank to use .env)"
            )

    with gr.Row():
        max_steps = gr.Slider(
            minimum=1,
            maximum=1000,
            value=100,
            step=1,
            label="Max Run Steps",
            info="Maximum number of steps the agent will take",
            interactive=True
        )
        max_actions = gr.Slider(
            minimum=1,
            maximum=100,
            value=10,
            step=1,
            label="Max Number of Actions",
            info="Maximum number of actions the agent will take per step",
            interactive=True
        )

    with gr.Row():
        max_input_tokens = gr.Number(
            label="Max Input Tokens",
            value=128000,
            precision=0,
            interactive=True
        )
        tool_calling_method = gr.Dropdown(
            label="Tool Calling Method",
            value="auto",
            interactive=True,
            allow_custom_value=True,
            choices=['function_calling', 'json_mode', 'raw', 'auto', 'tools', "None"],
            visible=True
        )

    tab_components.update(dict(
        override_system_prompt=override_system_prompt,
        extend_system_prompt=extend_system_prompt,
        llm_provider=llm_provider,
        llm_model_name=llm_model_name,
        llm_temperature=llm_temperature,
        use_vision=use_vision,
        llm_base_url=llm_base_url,
        llm_api_key=llm_api_key,
        max_steps=max_steps,
        max_actions=max_actions,
        max_input_tokens=max_input_tokens,
        tool_calling_method=tool_calling_method,
        mcp_json_file=mcp_json_file,
        mcp_server_config=mcp_server_config,
    ))
    webui_manager.add_components("agent_settings", tab_components)
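    # Presumably other tabs and the agent runner look these components up through
    # webui_manager under the "agent_settings" namespace registered above; the exact
    # lookup mechanism lives in WebuiManager and is not shown in this file.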

    async def update_wrapper(mcp_file):
        """Wrapper that binds webui_manager when the MCP JSON file changes."""
        updates = await update_mcp_server(mcp_file, webui_manager)
        yield updates

    # update_mcp_server returns (config text, visibility update); both values are
    # applied to mcp_server_config, which is why it appears twice in outputs.
    mcp_json_file.change(
        update_wrapper,
        inputs=[mcp_json_file],
        outputs=[mcp_server_config, mcp_server_config]
    )
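

# Minimal usage sketch (assumed wiring; the real entry point lives elsewhere in the app):
#
#   manager = WebuiManager()
#   with gr.Blocks() as demo:
#       with gr.Tab("Agent Settings"):
#           create_agent_settings_tab(manager)
#   demo.launch()
#
# WebuiManager's constructor arguments are not visible from this file, so the bare
# WebuiManager() call above is an assumption.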