minchyeom committed
Commit 0e02b97 · 1 Parent(s): 7637921

Add initial implementation of chat application with required dependencies


- Created requirements.txt for package dependencies
- Implemented main run logic in run.py
- Defined ChatSession class in chat.py for managing conversations
- Added configuration settings in config.py
- Implemented logging functionality in log.py
- Defined message schema in schema.py
- Added utility function for addition in tools.py
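A minimal sketch of how these pieces are meant to compose, mirroring run.py below but passing the constructor arguments explicitly; it assumes a local Ollama server is reachable and that the qwen3 model from src/config.py is pulled (the host and model values shown are just the config defaults):

import asyncio

from src.chat import ChatSession


async def main() -> None:
    # Explicit host/model arguments; omitting them falls back to src/config.py.
    session = ChatSession(host="http://localhost:11434", model="qwen3")
    answer = await session.chat("What is 10 + 23?")
    print(answer)


asyncio.run(main())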

Files changed (8)
  1. requirements.txt +2 -0
  2. run.py +18 -0
  3. src/__init__.py +4 -0
  4. src/chat.py +63 -0
  5. src/config.py +8 -0
  6. src/log.py +34 -0
  7. src/schema.py +12 -0
  8. src/tools.py +16 -0
requirements.txt ADDED
@@ -0,0 +1,2 @@
+ colorama
+ ollama
run.py ADDED
@@ -0,0 +1,18 @@
+ from __future__ import annotations
+
+ import asyncio
+
+ from src.chat import ChatSession
+
+
+ async def _main() -> None:
+     chat = ChatSession()
+     answer = await chat.chat("What is 10 + 23?")
+     print("\n>>>", answer)
+
+
+ if __name__ == "__main__":
+     try:
+         asyncio.run(_main())
+     except KeyboardInterrupt:
+         pass
src/__init__.py ADDED
@@ -0,0 +1,4 @@
+ from .chat import ChatSession
+ from .tools import add_two_numbers
+
+ __all__: list[str] = ["ChatSession", "add_two_numbers"]
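Because src/__init__.py re-exports the public names, a hypothetical consumer can import from the package root instead of the submodules; a quick sketch:

from src import ChatSession, add_two_numbers

print(add_two_numbers(10, 23))  # plain function call, no model involved: 33
session = ChatSession()         # uses the defaults from src/config.py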
src/chat.py ADDED
@@ -0,0 +1,63 @@
+ from __future__ import annotations
+
+ from typing import List
+
+ from ollama import AsyncClient, ChatResponse
+
+ from .config import MAX_TOOL_CALL_DEPTH, MODEL_NAME, OLLAMA_HOST
+ from .log import get_logger
+ from .schema import Msg
+ from .tools import add_two_numbers
+
+ _LOG = get_logger(__name__)
+
+
+ class ChatSession:
+     def __init__(self, host: str = OLLAMA_HOST, model: str = MODEL_NAME) -> None:
+         self._client = AsyncClient(host=host)
+         self._model = model
+
+     async def __aenter__(self) -> "ChatSession":
+         return self
+
+     async def ask(self, messages: List[Msg], *, think: bool = True) -> ChatResponse:
+         return await self._client.chat(
+             self._model,
+             messages=messages,
+             think=think,
+             tools=[add_two_numbers],
+         )
+
+     async def _handle_tool_calls(
+         self,
+         messages: List[Msg],
+         response: ChatResponse,
+         depth: int = 0,
+     ) -> ChatResponse:
+         if depth >= MAX_TOOL_CALL_DEPTH or not response.message.tool_calls:
+             return response
+
+         for call in response.message.tool_calls:
+             if call.function.name == "add_two_numbers":
+                 result = add_two_numbers(**call.function.arguments)
+                 messages.append(
+                     {
+                         "role": "tool",
+                         "name": call.function.name,
+                         "content": str(result),
+                     }
+                 )
+                 nxt = await self.ask(messages, think=True)
+                 return await self._handle_tool_calls(messages, nxt, depth + 1)
+
+         return response
+
+     async def chat(self, prompt: str) -> str:
+         messages: List[Msg] = [{"role": "user", "content": prompt}]
+         response = await self.ask(messages)
+         messages.append(response.message.model_dump())
+
+         _LOG.info("Thinking:\n%s", response.message.thinking or "<no thinking trace>")
+
+         final_resp = await self._handle_tool_calls(messages, response)
+         return final_resp.message.content
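For anyone probing the tool-call loop above, here is a small sketch that calls ChatSession.ask directly and prints whatever tool calls the model proposes, instead of letting chat() resolve them. It assumes a running Ollama server and uses only attributes already referenced in chat.py (message.tool_calls, call.function.name, call.function.arguments):

import asyncio

from src.chat import ChatSession
from src.schema import Msg


async def inspect_tool_calls() -> None:
    session = ChatSession()
    messages: list[Msg] = [{"role": "user", "content": "Add 4 and 5."}]
    response = await session.ask(messages)
    # The model may also answer directly, in which case tool_calls is empty or None.
    for call in response.message.tool_calls or []:
        print(call.function.name, call.function.arguments)


asyncio.run(inspect_tool_calls())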
src/config.py ADDED
@@ -0,0 +1,8 @@
+ from __future__ import annotations
+
+ import os
+ from typing import Final
+
+ MODEL_NAME: Final[str] = os.getenv("OLLAMA_MODEL", "qwen3")
+ OLLAMA_HOST: Final[str] = os.getenv("OLLAMA_HOST", "http://localhost:11434")
+ MAX_TOOL_CALL_DEPTH: Final[int] = 5
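Because these values are read once with os.getenv at import time, any override has to be in the environment before src.config (or anything that imports it) is loaded. A hedged sketch, where the qwen3:8b tag is only an example model name:

import os

# Must be set before src.config is first imported.
os.environ["OLLAMA_MODEL"] = "qwen3:8b"
os.environ["OLLAMA_HOST"] = "http://127.0.0.1:11434"

from src.config import MODEL_NAME, OLLAMA_HOST  # noqa: E402

print(MODEL_NAME, OLLAMA_HOST)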
src/log.py ADDED
@@ -0,0 +1,34 @@
+ from __future__ import annotations
+
+ import logging
+ from typing import Final
+
+ from colorama import Fore, Style, init as colorama_init
+
+ __all__: Final[list[str]] = ["get_logger"]
+
+
+ class _ColourFormatter(logging.Formatter):
+     _COLOUR_FOR_LEVEL = {
+         logging.DEBUG: Fore.CYAN,
+         logging.INFO: Fore.GREEN,
+         logging.WARNING: Fore.YELLOW,
+         logging.ERROR: Fore.RED,
+         logging.CRITICAL: Fore.MAGENTA,
+     }
+
+     def format(self, record: logging.LogRecord) -> str:  # noqa: D401
+         colour = self._COLOUR_FOR_LEVEL.get(record.levelno, "")
+         return f"{colour}{super().format(record)}{Style.RESET_ALL}"
+
+
+ def get_logger(name: str | None = None, level: int = logging.INFO) -> logging.Logger:
+     colorama_init()
+     handler = logging.StreamHandler()
+     handler.setFormatter(_ColourFormatter("%(asctime)s [%(levelname)s] %(name)s: %(message)s"))
+
+     logger = logging.getLogger(name)
+     logger.setLevel(level)
+     logger.handlers.clear()
+     logger.addHandler(handler)
+     return logger
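A minimal usage sketch for the logger factory above; level names map to the colours in _COLOUR_FOR_LEVEL (cyan for DEBUG, green for INFO, yellow for WARNING, and so on):

import logging

from src.log import get_logger

log = get_logger("demo", level=logging.DEBUG)
log.debug("cyan debug line")
log.info("green info line")
log.warning("yellow warning line")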
src/schema.py ADDED
@@ -0,0 +1,12 @@
+ from __future__ import annotations
+
+ from typing import List, Literal, Optional, TypedDict
+
+ from ollama import Message
+
+
+ class Msg(TypedDict, total=False):
+     role: Literal["user", "assistant", "tool"]
+     content: str
+     name: Optional[str]
+     tool_calls: Optional[List[Message.ToolCall]]
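Msg is declared with total=False, so every key is optional; the two shapes actually produced in chat.py look roughly like this:

from src.schema import Msg

user_msg: Msg = {"role": "user", "content": "What is 10 + 23?"}
tool_msg: Msg = {"role": "tool", "name": "add_two_numbers", "content": "33"}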
src/tools.py ADDED
@@ -0,0 +1,16 @@
+ from __future__ import annotations
+
+ __all__: list[str] = ["add_two_numbers"]
+
+
+ def add_two_numbers(a: int, b: int) -> int:  # noqa: D401
+     """Add two numbers together.
+
+     Args:
+         a (int): First number to add.
+         b (int): Second number to add.
+
+     Returns:
+         int: The sum of the two numbers.
+     """
+     return a + b