import subprocess
import gradio as gr
from openai import OpenAI
import json
from agno.agent import Agent, RunResponse
from agno.models.openai.like import OpenAILike
# Launch the local inference server (start.sh is expected to start a
# llama.cpp-compatible server on port 8000). Popen does not wait, so the
# first chat requests may fail until the server finishes booting.
# NOTE(review): shell=True with a hardcoded command string — no untrusted
# input reaches the shell here, but a list argv with shell=False would be
# the safer idiom; confirm start.sh does not need shell features.
subprocess.Popen("bash /home/user/app/start.sh", shell=True)

# Agent backed by the OpenAI-compatible endpoint exposed by the local server.
agent = Agent(
    model=OpenAILike(
        id="model",
        api_key="no-token",  # local server performs no authentication
        base_url="http://0.0.0.0:8000/v1",
    )
)
def handle_function_call(function_name, arguments):
    """Dispatch a model-requested tool call and return its textual result.

    Args:
        function_name: Name of the tool the model asked to invoke.
        arguments: Dict of arguments supplied by the model.

    Returns:
        A human-readable result string, or a fallback message when the
        tool name is not recognised. Both tool branches are stubs awaiting
        a real implementation.
    """
    if function_name == "browser_search":
        # Stub: echo the request until real search is wired in.
        query = arguments.get("query", "")
        max_results = arguments.get("max_results", 5)
        return f"Search results for '{query}' (max {max_results} results): [Implementation needed]"

    if function_name == "code_interpreter":
        # Stub: refuse empty input, otherwise echo the snippet.
        code = arguments.get("code", "")
        return (
            f"Code interpreter results for '{code}': [Implementation needed]"
            if code
            else "No code provided to execute."
        )

    return f"Unknown function: {function_name}"
def respond(
    message,
    history: list[tuple[str, str]] | None = None,
    system_message=None,
):
    """Stream an assistant reply for a Gradio ChatInterface turn.

    Args:
        message: The user's latest message.
        history: Prior (user, assistant) turn pairs; None/empty for a
            fresh conversation. (Was a mutable default `[]` — fixed.)
        system_message: Optional system prompt prepended to the context.

    Yields:
        The accumulated assistant text after each streamed chunk, or a
        single error banner if the backend call fails.
    """
    messages = []
    if system_message:
        messages.append({"role": "system", "content": system_message})
    for user_turn, assistant_turn in history or []:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    output = ""
    try:
        print("messages", messages)
        stream = agent.run(messages=messages, stream=True)
        for chunk in stream:
            print("chunk", chunk)
            # chunk.content can be None on tool/metadata events; skipping
            # them avoids a TypeError from `str += None` mid-stream.
            if chunk.content:
                output += chunk.content
                yield output
    except Exception as e:
        print(f"[Error] {e}")
        yield "⚠️ Llama.cpp server error"
# Wire the streaming responder into a Gradio chat UI.
demo = gr.ChatInterface(respond)

if __name__ == "__main__":
    # show_api=False hides the auto-generated API docs page.
    demo.launch(show_api=False)