import gradio as gr
from database import Database
from filesource import FileSource
from agent import run_agent
from services.utils import get_db_scheme_from_uri

# Currently connected data source (Database or FileSource); set by the connect handlers below.
source = None


# Load an uploaded CSV/Parquet/Excel file and expose it as the active data source.
def connect_to_file(file):
    global source
    try:
        source = FileSource(file.name)
        source.connect()
        schema = source._pretify_schema()
        status = "Connection successful!"
    except Exception as e:
        schema = ""
        status = f"Error: {str(e)}"
    return schema, status


# Connect to a database from its connection URL and expose it as the active data source.
def connect_to_database(db_url):
    global source
    try:
        dialect = get_db_scheme_from_uri(db_url)
        source = Database(db_url, dialect)
        source.connect()
        schema = source._pretify_schema()
        status = "Connection successful!"
    except Exception as e:
        schema = ""
        status = f"Error: {str(e)}"
    return schema, status

# Function to add user message to chat history
def user(user_message, chat_history):
    chat_history.append({"role": "user", "content": user_message})
    return "", chat_history

# Function to stream the assistant's response into the chat history
def bot(chat_history):
    if source is None:
        chat_history.append({"role": "assistant", "content": "Please connect to a database or load a file before asking a question."})
        yield chat_history
    else:
        # Run the agent on the latest user message and stream its answer chunk by chunk.
        answer = run_agent(source, chat_history[-1]["content"])
        chat_history.append({"role": "assistant", "content": ""})

        for chunk in answer:
            chat_history[-1]["content"] += chunk
            yield chat_history

# Create the Gradio interface
with gr.Blocks(theme=gr.themes.Default(), css="""
    .gr-button { margin: 5px; border-radius:16px; }
    .gr-textbox, .gr-text-area, .gr-dropdown, .gr-json { border-radius: 8px; }
    .gr-row { gap: 10px; }
    .gr-tab { border-radius: 8px; }
    .status-text { font-size: 0.9em; color: #555; }
    .gr-json { max-height: 300px; overflow-y: auto; } /* Added scrolling for JSON */
""") as demo:
    gr.Markdown(
        """
        # 🤖 MCP DB Answer
        Your MCP server that lets you talk to any database.

        Powered by Ibis, it supports PostgreSQL, SQLite, MySQL, MSSQL, ClickHouse, BigQuery, and many others.

        It also supports CSV and Parquet files.
        """,
        elem_classes=["header"]
    )

    with gr.Column(scale=3):
        with gr.Tabs():
            with gr.TabItem("💬 Chat"):
                with gr.Group():
                    main_chat_disp = gr.Chatbot(
                        label=None, height=600,
                        avatar_images=(None, "https://huggingface.co/spaces/Space-Share/bucket/resolve/main/images/pfp.webp"),
                        show_copy_button=True, render_markdown=True, sanitize_html=True, type='messages'
                    )
                    with gr.Row(variant="compact"):
                        user_msg_tb = gr.Textbox(
                            show_label=False, placeholder="Talk with your data...",
                            scale=7, lines=1, max_lines=3
                        )
                        send_btn = gr.Button("Send", variant="primary", scale=1, min_width=100)
            with gr.TabItem("Config"):
                with gr.Row():
                    # Left column for database configuration.
                    with gr.Column(scale=1):
                        gr.Markdown("## Database Configuration")
                        # Textbox for entering the database URL.
                        db_url_tb = gr.Textbox(
                            show_label=True, label="Database URL", placeholder="Enter the URL to connect to the database..."
                        )
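                        # Expected format is a connection URI such as postgresql://user:pass@host:5432/dbname
                        # or sqlite:///path/to/db.sqlite (exact support depends on the Ibis backends installed).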
                        # Button to connect to the database.
                        connect_btn = gr.Button("Connect", variant="primary")

                        file_uploader = gr.File(
                            label="Upload File", file_types=[".csv", ".parquet", ".xls", ".xlsx"]
                        )
                        # Button to load the uploaded file as the data source.
                        load_btn = gr.Button("Load", variant="primary")

                    # Right column for displaying the database schema and status message.
                    with gr.Column(scale=3):
                        gr.Markdown("## Database Schema")
                        # Textarea to display the database schema.
                        schema_ta = gr.TextArea(
                            show_label=False, placeholder="Database schema will be displayed here...",
                            lines=20, max_lines=50, interactive=False
                        )
                        # Textbox to display the status message.
                        status_tb = gr.Textbox(
                            show_label=False, placeholder="Status message will be displayed here...",
                            lines=1, max_lines=1, interactive=False, elem_classes=["status-text"]
                        )
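    # Wire up the events: the Connect/Load buttons update the schema and status views,
    # and Send appends the user message, then streams the agent's reply into the chat.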
    connect_btn.click(fn=connect_to_database, inputs=db_url_tb, outputs=[schema_ta, status_tb])
    load_btn.click(fn=connect_to_file, inputs=file_uploader, outputs=[schema_ta, status_tb])
    send_btn.click(fn=user, inputs=[user_msg_tb, main_chat_disp], outputs=[user_msg_tb, main_chat_disp], queue=False).then(
        fn=bot, inputs=main_chat_disp, outputs=main_chat_disp
    )

if __name__ == "__main__":
    # mcp_server=True also exposes the app as an MCP server alongside the web UI.
    demo.launch(mcp_server=True)