tech-envision committed on
Commit
ce67dba
·
unverified ·
2 Parent(s): 7403772 1f8a981

Merge pull request #84 from EnvisionMindCa/codex/create-macos-gui-app-for-api

Browse files
Files changed (5) hide show
  1. README.md +17 -0
  2. mac_gui/__init__.py +0 -0
  3. mac_gui/__main__.py +4 -0
  4. mac_gui/api_client.py +70 -0
  5. mac_gui/app.py +144 -0
README.md CHANGED
@@ -129,3 +129,20 @@ pyinstaller --onefile -n llm-chat cli_app/main.py
129
  ```
130
 
131
  The resulting `llm-chat.exe` works on Windows 10/11.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
129
  ```
130
 
131
  The resulting `llm-chat.exe` works on Windows 10/11.
132
+
133
+ ## macOS GUI Application
134
+
135
+ A simple graphical client built with Tkinter lives in the `mac_gui` module. It
136
+ provides a text chat interface and supports file uploads via the REST API.
137
+
138
+ ### Run the GUI
139
+
140
+ ```bash
141
+ pip install -r requirements.txt
142
+ python -m mac_gui
143
+ ```
144
+
145
+ Use the fields at the top of the window to configure the API URL, optional API
146
+ key, user name and session. Type a message and press **Send** to chat or click
147
+ **Upload** to select a document to upload. Responses stream into the main text
148
+ area.
mac_gui/__init__.py ADDED
File without changes
mac_gui/__main__.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
"""Entry point so the GUI can be launched with ``python -m mac_gui``."""

from .app import main

if __name__ == "__main__":
    main()
mac_gui/api_client.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ from typing import Iterator, List, Dict, Any
5
+
6
+ import httpx
7
+
8
+
9
class APIClient:
    """Simple client for the LLM backend REST API.

    Parameters
    ----------
    server:
        Base URL of the backend; any trailing slashes are stripped so paths
        can be appended verbatim.
    api_key:
        Optional API key sent as the ``X-API-Key`` header on every request.
    """

    def __init__(self, server: str = "http://localhost:8000", api_key: str | None = None) -> None:
        self._server = server.rstrip("/")
        # Only attach the auth header when a key was actually supplied.
        self._headers = {"X-API-Key": api_key} if api_key else {}

    # ------------------------------------------------------------------
    # Helper methods
    # ------------------------------------------------------------------
    def _url(self, path: str) -> str:
        """Join *path* (which must start with ``/``) onto the base URL."""
        return f"{self._server}{path}"

    # ------------------------------------------------------------------
    # Public API methods
    # ------------------------------------------------------------------
    def list_sessions(self, user: str) -> List[str]:
        """Return the list of session names known for *user*."""
        resp = httpx.get(self._url(f"/sessions/{user}"), headers=self._headers)
        resp.raise_for_status()
        return resp.json().get("sessions", [])

    def stream_chat(self, user: str, session: str, prompt: str) -> Iterator[str]:
        """Yield the assistant's reply to *prompt* line by line as it streams.

        ``timeout=None`` disables the read timeout because the server keeps
        the connection open for the whole generation.
        """
        with httpx.stream(
            "POST",
            self._url("/chat/stream"),
            json={"user": user, "session": session, "prompt": prompt},
            headers=self._headers,
            timeout=None,
        ) as resp:
            resp.raise_for_status()
            for line in resp.iter_lines():
                # BUGFIX: httpx.Response.iter_lines() yields already-decoded
                # ``str`` (unlike requests, which yields bytes); the previous
                # ``line.decode()`` raised AttributeError on every chunk.
                if line:
                    yield line

    def upload_document(self, user: str, session: str, path: str) -> str:
        """Upload the file at *path* and return its path inside the VM."""
        name = os.path.basename(path)
        with open(path, "rb") as f:
            files = {"file": (name, f)}
            data = {"user": user, "session": session}
            resp = httpx.post(self._url("/upload"), data=data, files=files, headers=self._headers)
        resp.raise_for_status()
        return resp.json()["path"]

    def list_vm_dir(self, user: str, path: str = "/data") -> List[Dict[str, Any]]:
        """Return directory entries for *path* inside the user's VM."""
        resp = httpx.get(self._url(f"/vm/{user}/list"), params={"path": path}, headers=self._headers)
        resp.raise_for_status()
        return resp.json().get("entries", [])

    def read_vm_file(self, user: str, path: str) -> str:
        """Return the text content of *path* inside the user's VM."""
        resp = httpx.get(self._url(f"/vm/{user}/file"), params={"path": path}, headers=self._headers)
        resp.raise_for_status()
        return resp.json().get("content", "")

    def write_vm_file(self, user: str, path: str, content: str) -> None:
        """Write *content* to *path* inside the user's VM (raises on failure)."""
        payload = {"path": path, "content": content}
        resp = httpx.post(self._url(f"/vm/{user}/file"), json=payload, headers=self._headers)
        resp.raise_for_status()

    def delete_vm_file(self, user: str, path: str) -> None:
        """Delete *path* inside the user's VM (raises on failure)."""
        resp = httpx.delete(self._url(f"/vm/{user}/file"), params={"path": path}, headers=self._headers)
        resp.raise_for_status()
mac_gui/app.py ADDED
@@ -0,0 +1,144 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import threading
4
+ import queue
5
+ from pathlib import Path
6
+ from tkinter import (
7
+ Tk,
8
+ Text,
9
+ Entry,
10
+ Button,
11
+ Scrollbar,
12
+ Frame,
13
+ Label,
14
+ StringVar,
15
+ END,
16
+ filedialog,
17
+ )
18
+
19
+ from .api_client import APIClient
20
+
21
+
22
class ChatApp:
    """Tkinter GUI for interacting with the LLM backend.

    Worker threads never touch Tk widgets directly: they push
    ``(kind, message)`` tuples onto an internal queue, which the Tk event
    loop drains every 100 ms via ``_process_queue``.
    """

    def __init__(self, root: Tk) -> None:
        self.root = root
        self.root.title("LLM Backend Chat")

        # Connection / identity settings edited through the top form.
        self.server_var = StringVar(value="http://localhost:8000")
        self.api_key_var = StringVar(value="")
        self.user_var = StringVar(value="default")
        self.session_var = StringVar(value="default")

        self._client = APIClient()
        # Thread-safe channel from worker threads to the UI thread.
        self._queue: queue.Queue[tuple[str, str]] = queue.Queue()

        self._build_ui()
        self.root.after(100, self._process_queue)

    # ------------------------------------------------------------------
    # UI construction
    # ------------------------------------------------------------------
    def _build_ui(self) -> None:
        """Create and lay out every widget of the window."""
        form = Frame(self.root)
        form.pack(fill="x")

        # (label text, backing variable, entry width, grid row, label column)
        fields = (
            ("Server:", self.server_var, 30, 0, 0),
            ("API Key:", self.api_key_var, 20, 0, 2),
            ("User:", self.user_var, 15, 1, 0),
            ("Session:", self.session_var, 15, 1, 2),
        )
        for text, var, width, row, col in fields:
            Label(form, text=text).grid(row=row, column=col, sticky="w")
            Entry(form, textvariable=var, width=width).grid(
                row=row, column=col + 1, sticky="ew"
            )

        self.chat_display = Text(self.root, wrap="word", height=20)
        self.chat_display.pack(fill="both", expand=True)

        scrollbar = Scrollbar(self.chat_display)
        scrollbar.pack(side="right", fill="y")
        self.chat_display.config(yscrollcommand=scrollbar.set)
        scrollbar.config(command=self.chat_display.yview)

        controls = Frame(self.root)
        controls.pack(fill="x")

        self.msg_entry = Entry(controls)
        self.msg_entry.pack(side="left", fill="x", expand=True)
        self.msg_entry.bind("<Return>", lambda _: self.send_message())

        Button(controls, text="Send", command=self.send_message).pack(side="left")
        Button(controls, text="Upload", command=self.upload_file).pack(side="left")

    # ------------------------------------------------------------------
    # Event handlers
    # ------------------------------------------------------------------
    def _update_client(self) -> None:
        """Rebuild the API client from the current form values."""
        self._client = APIClient(self.server_var.get(), self.api_key_var.get() or None)

    def send_message(self) -> None:
        """Echo the typed prompt locally and stream the reply in a thread."""
        prompt = self.msg_entry.get().strip()
        if not prompt:
            return
        self.msg_entry.delete(0, END)
        self.chat_display.insert(END, f"You: {prompt}\n")
        self.chat_display.see(END)
        self._update_client()
        threading.Thread(
            target=self._stream_prompt, args=(prompt,), daemon=True
        ).start()

    def _stream_prompt(self, prompt: str) -> None:
        """Worker: forward streamed response chunks into the queue."""
        try:
            for chunk in self._client.stream_chat(
                self.user_var.get(), self.session_var.get(), prompt
            ):
                self._queue.put(("assistant", chunk))
        except Exception as exc:  # pragma: no cover - runtime errors
            self._queue.put(("error", str(exc)))

    def upload_file(self) -> None:
        """Prompt for a file and upload it in a background thread."""
        chosen = filedialog.askopenfilename()
        if not chosen:
            return
        self._update_client()
        threading.Thread(
            target=self._upload_file, args=(Path(chosen),), daemon=True
        ).start()

    def _upload_file(self, path: Path) -> None:
        """Worker: upload *path* and report the resulting VM path."""
        try:
            vm_path = self._client.upload_document(
                self.user_var.get(), self.session_var.get(), str(path)
            )
        except Exception as exc:  # pragma: no cover - runtime errors
            self._queue.put(("error", str(exc)))
        else:
            self._queue.put(("info", f"Uploaded {path.name} -> {vm_path}"))

    # ------------------------------------------------------------------
    # Queue processing
    # ------------------------------------------------------------------
    def _process_queue(self) -> None:
        """Drain pending worker messages into the chat display, then re-arm."""
        while True:
            try:
                kind, text = self._queue.get_nowait()
            except queue.Empty:
                break
            if kind == "assistant":
                self.chat_display.insert(END, text)
            elif kind == "info":
                self.chat_display.insert(END, f"[INFO] {text}\n")
            else:
                self.chat_display.insert(END, f"[ERROR] {text}\n")
            self.chat_display.see(END)
        self.root.after(100, self._process_queue)
136
+
137
def main() -> None:
    """Create the root window, attach the chat UI and run the Tk loop."""
    root_window = Tk()
    ChatApp(root_window)
    root_window.mainloop()


if __name__ == "__main__":  # pragma: no cover - manual execution
    main()