Spaces:
Sleeping
Sleeping
File size: 2,221 Bytes
4024b96 de90f95 4024b96 de90f95 4024b96 de90f95 4024b96 de90f95 4024b96 de90f95 4024b96 de90f95 4024b96 9c8e2dd 4024b96 de90f95 4024b96 de90f95 9c8e2dd 4024b96 9c8e2dd 4024b96 9c8e2dd 688c4db 9c8e2dd 4024b96 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 |
import gradio as gr
from gradio import ChatMessage
import time
sleep_time = 0.5  # Seconds between simulated "thinking" steps.


def simulate_thinking_chat(message, history):
    """Chat handler that streams a fake "thinking" phase before answering.

    Progressively yields a pending ChatMessage whose content accumulates
    thought bullets, marks it done with its elapsed duration, then yields
    the completed thought message together with the final answer.

    Args:
        message: Incoming user message (unused by this demo).
        history: Prior chat history (unused by this demo).

    Yields:
        A ChatMessage, then finally a list of two ChatMessages
        (the finished thoughts plus the answer).
    """
    start_time = time.time()
    # Collapsible "thinking" bubble; status "pending" renders it as in-progress.
    response = ChatMessage(
        content="",
        metadata={"title": "_Thinking_ step-by-step", "id": 0, "status": "pending"},
    )
    yield response

    thoughts = [
        "First, I need to understand the core aspects of the query...",
        "Now, considering the broader context and implications...",
        "Analyzing potential approaches to formulate a comprehensive answer...",
        "Finally, structuring the response for clarity and completeness...",
    ]

    # Stream one bullet at a time into the thinking bubble.
    accumulated_thoughts = ""
    for thought in thoughts:
        time.sleep(sleep_time)
        accumulated_thoughts += f"- {thought}\n\n"
        response.content = accumulated_thoughts.strip()
        yield response

    # Close the thinking bubble and record how long the "thinking" took.
    response.metadata["status"] = "done"
    response.metadata["duration"] = time.time() - start_time
    yield response

    time.sleep(5.0)  # Deliberate pause before the final answer appears.

    # Final yield: the completed thoughts followed by the actual answer.
    yield [
        response,
        ChatMessage(
            content="Based on my thoughts and analysis above, my response is: This dummy repro shows how thoughts of a thinking LLM can be progressively shown before providing its final answer."
        ),
    ]
# Prebuilt ChatInterface wrapper around the same generator handler.
# type="messages" selects openai-style role/content history, which is
# what ChatMessage objects require.
# NOTE(review): only `demo` is launched at the bottom of this file, so
# demo1 appears unused — confirm whether it is launched elsewhere.
demo1 = gr.ChatInterface(
simulate_thinking_chat,
title="Thinking LLM Chat Interface 🤔",
type="messages",
)
# Hand-built Blocks UI: a chatbot plus a textbox/button row wired to the
# same generator handler, with the textbox cleared after each send.
with gr.Blocks() as demo:
    # type="messages" is required because simulate_thinking_chat yields
    # ChatMessage objects; the legacy tuple format cannot display them.
    chat = gr.Chatbot(type="messages")
    with gr.Row():
        text = gr.Textbox(scale=9)
        btn = gr.Button(scale=1)
    # NOTE(review): the handler yields a single message / pair rather than
    # the full history expected as a Chatbot value — this file looks like a
    # bug repro, so that mismatch is kept as-is.
    btn.click(simulate_thinking_chat, [text, chat], [chat]).then(
        lambda: gr.Textbox(value=""), [], text
    )
    text.submit(simulate_thinking_chat, [text, chat], [chat]).then(
        lambda: gr.Textbox(value=""), [], text
    )

if __name__ == "__main__":
    demo.launch()