Update app.py
app.py
CHANGED
@@ -38,12 +38,14 @@ def save_memory(memory):
 memory = load_memory()

 # -----------------------
-# Chat function
+# Chat function (original behavior)
+# returns (history, history) to match previous usage
 # -----------------------
 def chat_with_model(message, history, context):
     if not isinstance(history, list):
         history = []

+    # prevent empty messages
     if not message:
         return history, history

@@ -55,29 +57,19 @@ def chat_with_model(message, history, context):
         save_memory(history)
         return history, history

-    # 🧠
-
-
-
-
-
-
-
-
-
-    #
-    conversation = [{
-        "role": "system",
-        "content": (
-            "You are EduAI — an educational AI assistant created by Wafa Fazly "
-            "You help students learn subjects such as Math, Science, English, and IT. "
-            "EduAI runs on the model 'openai/gpt-oss-safeguard-20b:groq', which was originally "
-            "trained by OpenAI. Always answer truthfully when asked about your creation. "
-            "Never chat unnecessary conversations, and don't talk anything apart from education."
-        )
-    }]
-
+    # 🧠 Build conversation
+    conversation = [{"role": "system", "content": (
+        "You are EduAI — an educational AI assistant created by Wafa Fazly "
+        "from Fathima Muslim Ladies College. "
+        "You help students learn subjects such as Math, Science, English, and IT. "
+        "EduAI runs on the model 'openai/gpt-oss-safeguard-20b:groq', which was originally "
+        "trained by OpenAI. Always answer truthfully when asked about your creation."
+        "never chat unnecessary conversations, and don't talk anything apart from education"
+    )}]
+
+    # convert tuples to messages if necessary (keeps old history format)
     for past in history[-5:]:
+        # expect (user_message, bot_reply)
         if isinstance(past, tuple) and len(past) == 2:
             conversation.append({"role": "user", "content": past[0]})
             conversation.append({"role": "assistant", "content": past[1]})
@@ -86,6 +78,7 @@ def chat_with_model(message, history, context):

     conversation.append({"role": "user", "content": message})

+    # 🌐 Send to Hugging Face model
     try:
         response = requests.post(
             "https://router.huggingface.co/v1/chat/completions",
@@ -103,16 +96,22 @@ def chat_with_model(message, history, context):
         data = response.json()
         reply = data["choices"][0]["message"]["content"]

+        # 🧮 Clean up math formatting (keeps your original formatting code)
         reply = reply.replace("Step", "\n\n**Step")
         reply = reply.replace(":", ":**")
         reply = reply.replace("\\[", "\n\n\\[")
         reply = reply.replace("\\]", "\\]\n\n")

+        if "\\" in reply or "log_" in reply or "^" in reply:
+            reply = f"{reply}"
+
         history.append((message, reply))
         save_memory(history)
+        # IMPORTANT: return a pair (history, history) because other code expects two outputs
         return history, history

     except Exception as e:
+        print("Error:", e)
         history.append((message, "😅 EduAI is having trouble connecting right now. Please try again later!"))
         return history, history

@@ -129,39 +128,49 @@ def clear_memory():
     return [], "🧹 Chat memory cleared! Start fresh."

 # -----------------------
-# Pause / Send wrappers (
+# Pause / Send wrappers (FIXED)
 # -----------------------
+
+# send handler that respects paused state (ALWAYS returns (chat_history, textbox_clear))
 def send_handler(message, history, context, paused_state):
     if paused_state:
+        # Do not call the model when paused — append a friendly hint
         if not isinstance(history, list):
             history = []
         history.append((None, "⏸️ Chat is paused. Click Resume to continue."))
-        return history, ""
-
+        return history, ""  # update chatbot and clear textbox
+    # Not paused: call original chat handler and adapt its returns to (chat_history, textbox_clear)
+    hist_pair = chat_with_model(message, history, context)  # returns (history, history)
+    # unpack safely
     if isinstance(hist_pair, tuple) and len(hist_pair) == 2:
         hist = hist_pair[0]
     else:
         hist = hist_pair
     return hist, ""

+
+# toggle pause/resume and update UI (state + chat + button text + send button disabled)
 def toggle_pause(paused_state, history):
     new_state = not bool(paused_state)
     if not isinstance(history, list):
         history = []

     if new_state:
+        # now paused
         history.append((None, "⏸️ Chat paused. Send is disabled."))
         pause_btn_update = gr.Button.update(value="▶ Resume")
         send_btn_update = gr.Button.update(disabled=True)
     else:
+        # resumed
         history.append((None, "▶️ Chat resumed. You can send messages now."))
         pause_btn_update = gr.Button.update(value="⏸ Pause")
         send_btn_update = gr.Button.update(disabled=False)

+    # return new pause state, updated chat history, and two UI updates (pause button & send button)
     return new_state, history, pause_btn_update, send_btn_update

 # -----------------------
-# Build UI
+# Build UI (unchanged layout; pause added)
 # -----------------------
 with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet")) as iface:
     gr.Markdown(
@@ -203,7 +212,7 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet")) as iface:
         EduAI was designed and fine-tuned by **Wafa Fazly**,
         a passionate Sri Lankan student 👩‍💻
         to help learners explore **Science, ICT, English, and more** —
-        in a smart and
+        in a smart and friendly way! 🌟
         """
     )

@@ -227,14 +236,20 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet")) as iface:
     with gr.Row():
         send = gr.Button("✨ Send Message")
         pause = gr.Button("⏸ Pause", variant="secondary")
+    # state to keep track of pause (False = running, True = paused)
     pause_state = gr.State(False)

-    # Event handlers
+    # 🪄 Event handlers
     subj.change(update_context, inputs=subj, outputs=context_display)
     planner.change(update_context, inputs=planner, outputs=context_display)
     lang.change(update_context, inputs=lang, outputs=context_display)
+
+    # send now uses send_handler and respects pause_state; outputs: chatbot and clears textbox
     send.click(send_handler, inputs=[msg, chatbot, context_display, pause_state], outputs=[chatbot, msg])
+
     clear_btn.click(clear_memory, outputs=[chatbot, context_display])
+
+    # pause toggles pause_state, updates chatbot with a message, updates pause button label and disables/enables send
     pause.click(toggle_pause, inputs=[pause_state, chatbot], outputs=[pause_state, chatbot, pause, send])

-iface.launch()
+iface.launch()
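The hunks above call load_memory() and save_memory(history), which are defined earlier in app.py and are not part of this diff. A minimal JSON-file sketch of what such helpers could look like; the MEMORY_FILE name and the error handling are assumptions for illustration, not the Space's actual code:

import json
import os

MEMORY_FILE = "memory.json"  # hypothetical filename; the real path is set earlier in app.py

def load_memory():
    # Return the saved (user, bot) pairs, or an empty history on first run / unreadable file.
    if os.path.exists(MEMORY_FILE):
        try:
            with open(MEMORY_FILE, "r", encoding="utf-8") as f:
                return [tuple(pair) for pair in json.load(f)]
        except (OSError, json.JSONDecodeError):
            return []
    return []

def save_memory(memory):
    # History arrives as a list of (user, bot) tuples; JSON stores them as 2-element lists.
    try:
        with open(MEMORY_FILE, "w", encoding="utf-8") as f:
            json.dump(memory, f, ensure_ascii=False, indent=2)
    except OSError:
        pass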
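The requests.post call to https://router.huggingface.co/v1/chat/completions is only partly visible here; its headers and payload sit outside the changed lines. The router accepts the OpenAI-compatible chat-completions format, so the request presumably looks roughly like the sketch below; the HF_TOKEN secret name, the timeout, and the exact payload keys are assumptions rather than the Space's code:

import os
import requests

def call_router(conversation):
    # conversation is the list of {"role", "content"} dicts built in chat_with_model.
    response = requests.post(
        "https://router.huggingface.co/v1/chat/completions",
        headers={"Authorization": f"Bearer {os.environ['HF_TOKEN']}"},  # assumed secret name
        json={
            "model": "openai/gpt-oss-safeguard-20b:groq",
            "messages": conversation,
        },
        timeout=60,
    )
    response.raise_for_status()
    return response.json()["choices"][0]["message"]["content"]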
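In the formatting hunk, reply.replace(":", ":**") inserts "**" after every colon in the reply, including colons in ordinary sentences and inside LaTeX, which leaves unmatched bold markers; the new branch reply = f"{reply}" also rewrites the string to itself, so it has no effect. If the intent is only to set off step headings and display math (an assumption about intent), a narrower pass such as this hypothetical format_reply helper avoids the stray markers:

import re

def format_reply(reply: str) -> str:
    # Bold "Step N:" headings and give \[ ... \] display math its own block,
    # without touching every other colon in the reply.
    reply = re.sub(r"Step\s*(\d+)\s*:", r"\n\n**Step \1:**", reply)
    reply = reply.replace("\\[", "\n\n\\[").replace("\\]", "\\]\n\n")
    return reply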
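One compatibility note: gr.Button.update(...) is the Gradio 3.x API; Gradio 4 dropped the per-component update() helpers in favor of the generic gr.update(...), and buttons are greyed out with interactive rather than disabled. If this Space is pinned to Gradio 4 or later (an assumption, since the requirements file is not in this diff), toggle_pause would look roughly like:

import gradio as gr

def toggle_pause(paused_state, history):
    # Flip the paused flag; keep history a list of (user, bot) tuples.
    new_state = not bool(paused_state)
    if not isinstance(history, list):
        history = []

    if new_state:
        history.append((None, "⏸️ Chat paused. Send is disabled."))
        pause_btn_update = gr.update(value="▶ Resume")
        send_btn_update = gr.update(interactive=False)
    else:
        history.append((None, "▶️ Chat resumed. You can send messages now."))
        pause_btn_update = gr.update(value="⏸ Pause")
        send_btn_update = gr.update(interactive=True)

    return new_state, history, pause_btn_update, send_btn_update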