Spaces:
Runtime error
Runtime error
Merge pull request #9 from tech-envision/codex/add-robust-system-prompt-for-tool-chaining
Browse files
- README.md      +4 -0
- src/chat.py    +17 -1
- src/config.py  +8 -0
README.md
CHANGED
@@ -10,6 +10,10 @@ conversations can be resumed with context. Two example tools are included:
 and allows importing safe modules like ``math``. The result is returned from a
 ``result`` variable or captured output.
 
+The application now injects a system prompt that instructs the model to chain
+multiple tools when required. This prompt ensures the assistant can orchestrate
+tool calls in sequence to satisfy the user's request.
+
 ## Usage
 
 ```bash
|
src/chat.py
CHANGED
@@ -5,7 +5,13 @@ import json
 
 from ollama import AsyncClient, ChatResponse, Message
 
-from .config import MAX_TOOL_CALL_DEPTH, MODEL_NAME, NUM_CTX, OLLAMA_HOST
+from .config import (
+    MAX_TOOL_CALL_DEPTH,
+    MODEL_NAME,
+    NUM_CTX,
+    OLLAMA_HOST,
+    SYSTEM_PROMPT,
+)
 from .db import Conversation, Message as DBMessage, User, _db, init_db
 from .log import get_logger
 from .schema import Msg
@@ -30,6 +36,7 @@ class ChatSession:
             user=self._user, session_name=session
         )
         self._messages: List[Msg] = self._load_history()
+        self._ensure_system_prompt()
 
     async def __aenter__(self) -> "ChatSession":
         return self
@@ -38,6 +45,15 @@ class ChatSession:
         if not _db.is_closed():
             _db.close()
 
+    def _ensure_system_prompt(self) -> None:
+        if any(m.get("role") == "system" for m in self._messages):
+            return
+
+        DBMessage.create(
+            conversation=self._conversation, role="system", content=SYSTEM_PROMPT
+        )
+        self._messages.insert(0, {"role": "system", "content": SYSTEM_PROMPT})
+
     def _load_history(self) -> List[Msg]:
         messages: List[Msg] = []
         for msg in self._conversation.messages.order_by(DBMessage.created_at):
|
src/config.py
CHANGED
@@ -7,3 +7,11 @@ MODEL_NAME: Final[str] = os.getenv("OLLAMA_MODEL", "qwen3")
 OLLAMA_HOST: Final[str] = os.getenv("OLLAMA_HOST", "http://localhost:11434")
 MAX_TOOL_CALL_DEPTH: Final[int] = 5
 NUM_CTX: Final[int] = int(os.getenv("OLLAMA_NUM_CTX", "32000"))
+
+SYSTEM_PROMPT: Final[str] = (
+    "You are a versatile AI assistant able to orchestrate several tools to "
+    "complete tasks. Plan your responses carefully and, when needed, call one "
+    "or more tools consecutively to gather data, compute answers, or transform "
+    "information. Continue chaining tools until the user's request is fully "
+    "addressed and then deliver a concise, coherent final reply."
+)