Update app.py
Browse files
app.py
CHANGED
@@ -6,19 +6,19 @@ generator = pipeline("text-generation", model="IDEA-CCNL/Wenzhong2.0-GPT2-110M")
|
|
6 |
|
7 |
def chat_fn(user_input, history):
    """Generate the bot's next reply and append it to the running history.

    Replays the conversation as a 用戶/機器人 transcript, asks the
    text-generation pipeline to continue it, extracts the newest bot
    segment, records the exchange in-place, and returns the updated
    history twice (one copy per Gradio output slot — presumably the
    chatbot display and the state; confirm against the caller).
    """
    convo = history if history else []

    # Transcript of every prior turn, then an open bot turn for the
    # model to complete.  (用戶 = user, 機器人 = bot — these are runtime
    # prompt markers and must not be altered.)
    prompt = "".join(f"用戶:{u}\n機器人:{b}\n" for u, b in convo)
    prompt += f"用戶:{user_input}\n機器人:"

    # pad_token_id=50256 is GPT-2's EOS id; passing it silences the
    # pipeline's missing-pad-token warning.
    generated = generator(prompt, max_new_tokens=100, pad_token_id=50256)
    full_text = generated[0]["generated_text"]

    # Everything after the last bot marker is the newly generated reply.
    reply = full_text.split("機器人:")[-1].strip()

    # Update the history with this exchange (mutates the caller's list).
    convo.append((user_input, reply))
    return convo, convo
|
23 |
|
24 |
-
gr.ChatInterface(
|
|
|
|
6 |
|
7 |
def chat_fn(user_input, history):
    """Generate a chatbot reply for *user_input* given the prior *history*.

    Parameters:
        user_input: the user's latest message (str).
        history: prior (user, bot) message pairs supplied by Gradio;
            may be None/empty on the first turn.

    Returns:
        str: the model's reply.  gr.ChatInterface expects the wrapped
        function to return the bare reply string — it manages the
        chat history itself.  The previous ``return history, history``
        shape belongs to the gr.Blocks + Chatbot/State pattern and is
        incompatible with ChatInterface, so it was removed along with
        the manual ``history.append``.
    """
    history = history or []

    # Build the prompt: replay the conversation as a 用戶/機器人
    # transcript, then open a new bot turn for the model to complete.
    # (These markers are runtime prompt text — do not translate.)
    prompt = "".join(f"用戶:{u}\n機器人:{b}\n" for u, b in history)
    prompt += f"用戶:{user_input}\n機器人:"

    # Generate a continuation.  pad_token_id=50256 is GPT-2's EOS id;
    # passing it silences the pipeline's missing-pad-token warning.
    output = generator(prompt, max_new_tokens=100, pad_token_id=50256)[0]["generated_text"]

    # Everything after the last bot marker is the newly generated reply.
    reply = output.split("機器人:")[-1].strip()
    return reply


# ✅ gr.ChatInterface() renders a ready-made chat-room UI around chat_fn.
gr.ChatInterface(chat_fn).launch()
|