Hugging Face Spaces — status: Running
Commit: "add debug" (Browse files)
File changed: app.py
@@ -42,8 +42,11 @@ def start_server() -> subprocess.Popen:
         "--clip_model_path", PROJ_FILE,
         "--chat_format", "llava-1-5",
         "--port", str(PORT),
+        "--host", "127.0.0.1",
         "--model_alias", MODEL_ALIAS
     ]
+
+    print("⏳ Launching llama server:", " ".join(cmd), file=sys.stderr)
     proc = subprocess.Popen(cmd)
     atexit.register(proc.terminate)
     # wait until responsive
@@ -54,6 +57,9 @@ def start_server() -> subprocess.Popen:
         except OSError:
             time.sleep(0.25)
     proc.terminate()
+    out, err = proc.communicate(timeout=1)
+    print("🐛 llama server stdout:\n", out.decode(), file=sys.stderr)
+    print("🐛 llama server stderr:\n", err.decode(), file=sys.stderr)
     raise RuntimeError(f"Server failed to start on port {PORT}.")
 
 server_proc = start_server()
|