tech-envision committed
Commit 9d47355 · unverified · 2 parents: 834003a 280fbf3

Merge pull request #31 from EnvisionMindCa/codex/modify-llm-generation-response-behavior

Files changed (1):
  1. src/chat.py  +18 -6
src/chat.py CHANGED
@@ -284,7 +284,10 @@ class ChatSession:
         async for resp in self._handle_tool_calls_stream(
             self._messages, response, self._conversation
         ):
-            yield resp.message.content
+            if resp.message.tool_calls:
+                continue
+            if resp.message.content:
+                yield resp.message.content
 
     async def _chat_during_tool(self, prompt: str) -> AsyncIterator[str]:
         DBMessage.create(conversation=self._conversation, role="user", content=prompt)
@@ -317,18 +320,23 @@ class ChatSession:
             nxt = await self.ask(self._messages, think=True)
             self._store_assistant_message(self._conversation, nxt.message)
             self._messages.append(nxt.message.model_dump())
-            yield nxt.message.content
+            if not nxt.message.tool_calls and nxt.message.content:
+                yield nxt.message.content
             async for part in self._handle_tool_calls_stream(
                 self._messages, nxt, self._conversation
             ):
-                yield part.message.content
+                if part.message.tool_calls:
+                    continue
+                if part.message.content:
+                    yield part.message.content
         else:
             resp = await user_task
             self._store_assistant_message(self._conversation, resp.message)
             self._messages.append(resp.message.model_dump())
             async with self._lock:
                 self._state = "awaiting_tool"
-            yield resp.message.content
+            if not resp.message.tool_calls and resp.message.content:
+                yield resp.message.content
         result = await exec_task
         self._tool_task = None
         self._messages.append(
@@ -342,10 +350,14 @@ class ChatSession:
         nxt = await self.ask(self._messages, think=True)
         self._store_assistant_message(self._conversation, nxt.message)
         self._messages.append(nxt.message.model_dump())
-        yield nxt.message.content
+        if not nxt.message.tool_calls and nxt.message.content:
+            yield nxt.message.content
         async for part in self._handle_tool_calls_stream(
             self._messages, nxt, self._conversation
         ):
-            yield part.message.content
+            if part.message.tool_calls:
+                continue
+            if part.message.content:
+                yield part.message.content
 
 
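To make the behavioural change concrete, here is a minimal, self-contained sketch of the yield-filtering pattern this merge introduces: chunks whose message carries tool calls are skipped, and only non-empty text content is streamed to the caller. The Message and Response dataclasses and the _stream() generator below are stand-ins invented for illustration; the repository's actual stream comes from ChatSession._handle_tool_calls_stream and the model client's own response objects.

# Hypothetical sketch of the filtering behaviour; not the repo's real classes.
import asyncio
from collections.abc import AsyncIterator
from dataclasses import dataclass, field


@dataclass
class Message:
    content: str = ""
    tool_calls: list = field(default_factory=list)


@dataclass
class Response:
    message: Message


async def _stream() -> AsyncIterator[Response]:
    # Simulated model output: a tool-call step, an empty chunk, then real text.
    yield Response(Message(tool_calls=[{"name": "run_tool"}]))
    yield Response(Message(content=""))
    yield Response(Message(content="Final answer."))


async def filtered_content(stream: AsyncIterator[Response]) -> AsyncIterator[str]:
    # Mirrors the merged logic: skip tool-call messages, drop empty content.
    async for resp in stream:
        if resp.message.tool_calls:
            continue
        if resp.message.content:
            yield resp.message.content


async def main() -> None:
    async for chunk in filtered_content(_stream()):
        print(chunk)  # prints only "Final answer."


asyncio.run(main())

Under these assumptions, a consumer of ChatSession's streaming methods would no longer receive empty strings or the textual payload of tool-call messages, which is consistent with the +18/-6 change shown in the diff above.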