Update app.py
Browse files
app.py
CHANGED
@@ -33,7 +33,7 @@ def respond(
 33
 34     try:
 35         stream = client.chat.completions.create(
 36 -          model="
 36 +          model="Deepseek-R1-0528-Qwen3-8B",  # ⚠️ Replace it with the name of the model loaded by your llama.cpp
 37            messages=messages,
 38            max_tokens=max_tokens,
 39            temperature=temperature,