import cohere
import asyncio
def send_message_stream(system_message, user_message, conversation_history, api_key, model_name="command-a-03-2025"):
    """Stream response from Cohere API"""
    # Initialize the Cohere client
    co = cohere.ClientV2(api_key)

    # Prepare all messages including history
    messages = [{"role": "system", "content": system_message}]
    messages.extend(conversation_history)
    messages.append({"role": "user", "content": user_message})

    # Send streaming request to Cohere
    stream = co.chat_stream(
        model=model_name,
        messages=messages
    )

    # Collect full response for history
    full_response = ""

    # Yield chunks as they come
    for chunk in stream:
        if chunk.type == "content-delta":
            text_chunk = chunk.delta.message.content.text
            full_response += text_chunk
            yield text_chunk

    # Update conversation history after streaming is complete
    conversation_history.append({"role": "user", "content": user_message})
    conversation_history.append({"role": "assistant", "content": full_response})
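
# Usage sketch (illustrative, not part of the original module): the generator
# above mutates conversation_history in place, but only after it has been
# fully consumed, so exhaust it before reading the history. COHERE_API_KEY is
# an assumed environment variable name.
#
#     import os
#     history = []
#     for piece in send_message_stream(
#         "You are a helpful assistant.",
#         "Write a haiku about the sea.",
#         history,
#         os.environ["COHERE_API_KEY"],
#     ):
#         print(piece, end="", flush=True)
#     print()
#     # history now holds the user turn plus the assistant's full reply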

def send_message(system_message, user_message, conversation_history, api_key, model_name="command-a-03-2025"):
    """Non-streaming version for backward compatibility"""
    # Initialize the Cohere client
    co = cohere.ClientV2(api_key)

    # Prepare all messages including history
    messages = [{"role": "system", "content": system_message}]
    messages.extend(conversation_history)
    messages.append({"role": "user", "content": user_message})

    # Send request to Cohere synchronously
    response = co.chat(
        model=model_name,
        messages=messages
    )

    # Get the response
    response_content = response.message.content[0].text

    # Update conversation history for this session
    conversation_history.append({"role": "user", "content": user_message})
    conversation_history.append({"role": "assistant", "content": response_content})

    return response_content, conversation_history
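

# Minimal demo (added for illustration, not part of the original module). It
# exercises both helpers above and assumes the key is supplied via a
# COHERE_API_KEY environment variable; adapt to however your app stores
# credentials.
if __name__ == "__main__":
    import os

    api_key = os.environ["COHERE_API_KEY"]  # assumed variable name
    history = []

    # Non-streaming call: returns the full reply plus the updated history
    reply, history = send_message(
        "You are a helpful assistant.",
        "Give me one fun fact about octopuses.",
        history,
        api_key,
    )
    print(reply)

    # Streaming follow-up: print the reply chunk by chunk as it arrives
    for piece in send_message_stream(
        "You are a helpful assistant.",
        "Now give me another one.",
        history,
        api_key,
    ):
        print(piece, end="", flush=True)
    print()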