import gradio as gr

from database import Database
from filesource import FileSource
from agent import run_agent
from services.utils import get_db_scheme_from_uri

source = None


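# The connect handlers below set the module-level `source` and return
# (schema, status) for the same two Gradio outputs. FileSource and Database
# are assumed to expose the same interface (connect() and _pretify_schema()),
# so the chat handler can use either one interchangeably.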
def connect_to_file(file):
    """Load an uploaded file as the active data source and return (schema, status)."""
    global source
    try:
        source = FileSource(file.name)
        source.connect()
        schema = source._pretify_schema()
        status = "Connection successful!"
    except Exception as e:
        schema = ""
        status = f"Error: {str(e)}"
    return schema, status


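# Example connection URLs (formats assumed; the exact scheme names depend on
# what get_db_scheme_from_uri and the Database backend accept):
#   postgresql://user:password@host:5432/mydb
#   mysql://user:password@host:3306/mydb
#   sqlite:///path/to/database.db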
def connect_to_database(db_url):
    """Connect to a database given its URL and return (schema, status)."""
    global source
    try:
        dialect = get_db_scheme_from_uri(db_url)
        source = Database(db_url, dialect)
        source.connect()
        schema = source._pretify_schema()
        status = "Connection successful!"
    except Exception as e:
        schema = ""
        status = f"Error: {str(e)}"
    return schema, status


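# Chat uses Gradio's two-step pattern: `user` appends the message to the
# history right away, and `bot` (chained via .then() below) streams the
# assistant's answer into that same history.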
def user(user_message, chat_history):
    chat_history.append({"role": "user", "content": user_message})
    return "", chat_history


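# run_agent is assumed to return an iterable of text chunks, so the reply can
# be streamed into the chat window as it is generated.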
def bot(chat_history):
    if source is None:
        chat_history.append({"role": "assistant", "content": "Please connect to a database before asking a question."})
        yield chat_history
    else:
        answer = run_agent(source, chat_history[-1]['content'])
        chat_history.append({"role": "assistant", "content": ""})
        for chunk in answer:
            chat_history[-1]['content'] += chunk
            yield chat_history


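# Build the UI: a Chat tab for querying the connected data and a Config tab
# for connecting a database URL or uploading a file.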
with gr.Blocks(theme=gr.themes.Default(), css="""
.gr-button { margin: 5px; border-radius: 16px; }
.gr-textbox, .gr-text-area, .gr-dropdown, .gr-json { border-radius: 8px; }
.gr-row { gap: 10px; }
.gr-tab { border-radius: 8px; }
.status-text { font-size: 0.9em; color: #555; }
.gr-json { max-height: 300px; overflow-y: auto; } /* Added scrolling for JSON */
""") as demo:
    gr.Markdown(
        """
        # 🤖 MCP DB Answer

        Your MCP server that allows you to talk to any database.

        Powered by Ibis, it supports PostgreSQL, SQLite, MySQL, MSSQL, ClickHouse, BigQuery, and many other backends.

        It also supports .csv and .parquet files.
        """,
        elem_classes=["header"]
    )

    with gr.Column(scale=3):
        with gr.Tabs():
            with gr.TabItem("💬 Chat"):
                with gr.Group():
                    main_chat_disp = gr.Chatbot(
                        label=None, height=600,
                        avatar_images=(None, "https://huggingface.co/spaces/Space-Share/bucket/resolve/main/images/pfp.webp"),
                        show_copy_button=True, render_markdown=True, sanitize_html=True, type='messages'
                    )
                with gr.Row(variant="compact"):
                    user_msg_tb = gr.Textbox(
                        show_label=False, placeholder="Talk with your data...",
                        scale=7, lines=1, max_lines=3
                    )
                    send_btn = gr.Button("Send", variant="primary", scale=1, min_width=100)
            with gr.TabItem("Config"):
                with gr.Row():
                    with gr.Column(scale=1):
                        gr.Markdown("## Database Configuration")

                        db_url_tb = gr.Textbox(
                            show_label=True, label="Database URL",
                            placeholder="Enter the URL to connect to the database..."
                        )

                        connect_btn = gr.Button("Connect", variant="primary")

                        file_uploader = gr.File(
                            label="Upload File", file_types=[".csv", ".parquet", ".xls", ".xlsx"]
                        )

                        load_btn = gr.Button("Load", variant="primary")

                    with gr.Column(scale=3):
                        gr.Markdown("## Database Schema")

                        schema_ta = gr.TextArea(
                            show_label=False, placeholder="Database schema will be displayed here...",
                            lines=20, max_lines=50, interactive=False
                        )

                        status_tb = gr.Textbox(
                            show_label=False, placeholder="Status message will be displayed here...",
                            lines=1, max_lines=1, interactive=False, elem_classes=["status-text"]
                        )

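    # Event wiring: Connect/Load refresh the schema and status panes; Send first
    # appends the user's message, then streams the agent's reply into the chat.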
    connect_btn.click(fn=connect_to_database, inputs=db_url_tb, outputs=[schema_ta, status_tb])
    load_btn.click(fn=connect_to_file, inputs=file_uploader, outputs=[schema_ta, status_tb])
    send_btn.click(fn=user, inputs=[user_msg_tb, main_chat_disp], outputs=[user_msg_tb, main_chat_disp], queue=False).then(
        fn=bot, inputs=main_chat_disp, outputs=main_chat_disp
    )


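# mcp_server=True serves the app's endpoints as MCP tools alongside the web UI
# (this assumes a Gradio version with MCP support installed).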
if __name__ == "__main__":
    demo.launch(mcp_server=True)