# NOTE: scraped from a Hugging Face Spaces file viewer (commit 64e0772,
# ~5,309 bytes); the page header and line-number gutter were removed.
import asyncio
import base64
import json
import shlex
import gradio as gr
from gradio.oauth import attach_oauth, OAuthToken
from huggingface_hub import HfApi
from src.team import TeamChatSession
from src.db import list_sessions_info
# Active chat sessions, keyed by (username, session name). Populated lazily
# by _get_chat and kept open for the lifetime of the process.
_SESSIONS: dict[tuple[str, str], TeamChatSession] = {}
# Shared Hugging Face API client, used to resolve usernames from OAuth tokens.
_API = HfApi()
def _username(token: OAuthToken) -> str:
    """Resolve the Hugging Face username behind ``token``.

    Falls back to the ``user`` field, then ``"unknown"``, when ``name``
    is absent or empty in the ``whoami`` response.
    """
    account = _API.whoami(token.token)
    name = account.get("name")
    if name:
        return name
    return account.get("user", "unknown")
async def _get_chat(user: str, session: str) -> TeamChatSession:
    """Return an active :class:`TeamChatSession` for ``user`` and ``session``.

    Sessions are created on first use, entered as async context managers,
    and cached in the module-level ``_SESSIONS`` registry for reuse.
    """
    key = (user, session)
    if key not in _SESSIONS:
        fresh = TeamChatSession(user=user, session=session)
        await fresh.__aenter__()
        _SESSIONS[key] = fresh
    return _SESSIONS[key]
async def _vm_execute(user: str, session: str, command: str) -> str:
    """Run ``command`` in the VM attached to the user's chat session.

    Raises:
        RuntimeError: if the session's senior agent has no running VM.
    """
    session_chat = await _get_chat(user, session)
    machine = getattr(session_chat.senior, "_vm", None)
    if machine is None:
        raise RuntimeError("VM not running")
    return await machine.execute_async(command, timeout=5)
async def send_message(
    message: str, history: list[dict] | None, session: str, token: OAuthToken
):
    """Stream a chat exchange into the Gradio chatbot.

    Yields the growing ``history`` list after the user turn is appended,
    then once per streamed assistant chunk. Consecutive chunks are merged
    into a single assistant message.
    """
    user = _username(token)
    chat = await _get_chat(user, session)
    history = history or []

    # Echo the user's message right away so the UI feels responsive.
    history.append({"role": "user", "content": message})
    yield history

    # Append / extend assistant turns as chunks arrive.
    async for chunk in chat.chat_stream(message):
        last = history[-1]
        if last["role"] == "assistant":
            last["content"] += chunk
        else:
            history.append({"role": "assistant", "content": chunk})
        yield history
def load_sessions(token: OAuthToken):
    """Refresh the session dropdown and the overview table for this user.

    Returns a ``gr.update`` for the dropdown (choices + selected value,
    defaulting to ``"default"`` when the user has no sessions) and a list
    of ``[name, last_message]`` rows.
    """
    user = _username(token)
    records = list_sessions_info(user)

    names: list[str] = []
    rows: list[list] = []
    for record in records:
        names.append(record["name"])
        rows.append([record["name"], record["last_message"]])

    selected = names[0] if names else "default"
    choices = names if names else ["default"]
    return gr.update(choices=choices, value=selected), rows
async def list_dir(path: str, session: str, token: OAuthToken):
    """List the entries of ``path`` inside the user's VM.

    Runs ``ls -1ap`` (one entry per line, include dotfiles, append ``/`` to
    directories) and parses the output into ``[name, is_dir]`` rows.

    Returns an empty list when ``ls`` reports an error (its messages start
    with ``"ls:"``, e.g. for a nonexistent directory).
    """
    user = _username(token)
    cmd = f"ls -1ap {shlex.quote(path)}"
    output = await _vm_execute(user, session, cmd)
    if output.startswith("ls:"):
        return []
    rows = []
    for line in output.splitlines():
        line = line.strip()
        if not line:
            continue
        is_dir = line.endswith("/")
        name = line[:-1] if is_dir else line
        # BUGFIX: with `-p`, the self/parent entries arrive as "./" and
        # "../", so they must be filtered AFTER the trailing slash is
        # stripped — the old check (`line in (".", "..")`) never matched.
        if name in (".", ".."):
            continue
        rows.append([name, is_dir])
    return rows
async def read_file(path: str, session: str, token: OAuthToken):
    """Return the contents of ``path`` from the user's VM (via ``cat``)."""
    command = f"cat {shlex.quote(path)}"
    return await _vm_execute(_username(token), session, command)
async def save_file(path: str, content: str, session: str, token: OAuthToken):
    """Write ``content`` to ``path`` inside the user's VM.

    The content is base64-encoded so arbitrary text (newlines, quotes,
    non-ASCII) survives the trip through the shell, and decoded again by a
    ``python -c`` one-liner inside the VM.

    Returns the literal status string ``"Saved"``.
    """
    user = _username(token)
    encoded = base64.b64encode(content.encode()).decode()
    # Build the Python snippet first, then quote the WHOLE snippet with
    # shlex.quote. The previous hand-written single-quoted wrapper broke
    # whenever json.dumps(path) contained a single quote, letting the path
    # escape the shell quoting.
    snippet = (
        f"import base64; "
        f"open({json.dumps(path)}, 'wb').write(base64.b64decode({json.dumps(encoded)}))"
    )
    cmd = f"python -c {shlex.quote(snippet)}"
    await _vm_execute(user, session, cmd)
    return "Saved"
async def delete_path(path: str, session: str, token: OAuthToken):
    """Delete the file or directory at ``path`` in the user's VM.

    Returns the shell's status line: ``Deleted`` on success, or
    ``File not found`` when nothing exists at ``path``.
    """
    user = _username(token)
    quoted = shlex.quote(path)
    # Directories need `rm -rf`; plain files get `rm -f`; anything else
    # reports "File not found".
    cmd = (
        f"bash -c 'if [ -d {quoted} ]; then rm -rf {quoted} && echo Deleted; "
        f"elif [ -e {quoted} ]; then rm -f {quoted} && echo Deleted; "
        f"else echo File not found; fi'"
    )
    return await _vm_execute(user, session, cmd)
# --- Gradio UI -------------------------------------------------------------
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    # Enable Hugging Face OAuth on the underlying FastAPI app; handlers whose
    # signature is annotated with OAuthToken receive the token automatically.
    attach_oauth(demo.app)
    login_btn = gr.LoginButton()

    with gr.Tab("Chat"):
        # Session picker plus streaming chat transcript.
        session_dd = gr.Dropdown(["default"], label="Session", value="default")
        refresh = gr.Button("Refresh Sessions")
        chatbox = gr.Chatbot(type="messages")
        msg = gr.Textbox(label="Message")
        send = gr.Button("Send")
        gr.Markdown(
            """
            This is a demo app of [llmOS](https://github.com/starsnatched/llmOS-Agent), an agent framework for building AI assistants that can interact with a Linux VM to accomplish tasks.
            """
        )

    with gr.Tab("Files"):
        # Minimal file browser backed by shell commands run inside the VM.
        dir_path = gr.Textbox(label="Directory", value="/")
        list_btn = gr.Button("List")
        table = gr.Dataframe(headers=["name", "is_dir"], datatype=["str", "bool"])
        file_path = gr.Textbox(label="File Path")
        load_btn = gr.Button("Load")
        content = gr.Code(label="Content", language=None)
        save_btn = gr.Button("Save")
        del_btn = gr.Button("Delete")

    # NOTE(review): load_sessions returns [name, last_message] rows, but
    # `table` is declared above with headers ["name", "is_dir"] for the
    # Files tab — the same component is reused for two different row
    # schemas; confirm this is intended.
    refresh.click(load_sessions, outputs=[session_dd, table])
    send_click = send.click(
        send_message,
        inputs=[msg, chatbox, session_dd],
        outputs=chatbox,
    )
    list_btn.click(list_dir, inputs=[dir_path, session_dd], outputs=table)
    load_btn.click(read_file, inputs=[file_path, session_dd], outputs=content)
    save_btn.click(save_file, inputs=[file_path, content, session_dd], outputs=content)
    del_btn.click(delete_path, inputs=[file_path, session_dd], outputs=content)
    # Clear the message box once a send event has completed.
    send_click.then(lambda: "", None, msg)

# Queuing is required for generator-based (streaming) event handlers.
demo.queue()

if __name__ == "__main__":
    demo.launch()