null and void committed on
Commit
783c2f7
·
verified ·
1 Parent(s): 1b244db

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -18
app.py CHANGED
@@ -82,16 +82,13 @@ class ConversationManager:
82
  manager = ConversationManager()
83
 
84
  def get_model(dropdown, custom):
85
- return custom if custom.strip() else dropdown
 
86
 
87
  def chat(model1, model2, user_input, history, inserted_response=""):
88
- try:
89
- # Attempt to load models
90
- manager.load_model(model1)
91
- manager.load_model(model2)
92
- except Exception as e:
93
- return f"Error loading models: {str(e)}", ""
94
-
95
  if not manager.conversation:
96
  manager.initial_prompt = user_input
97
  manager.clear_conversation()
@@ -131,6 +128,7 @@ def chat(model1, model2, user_input, history, inserted_response=""):
131
  current_model_index = (current_model_index + 1) % 2
132
 
133
  return history, "Conversation completed."
 
134
 
135
  models = [model1, model2]
136
  current_model_index = 0 if manager.current_model in ["User", "Model 2"] else 1
@@ -224,10 +222,10 @@ with gr.Blocks() as demo:
224
 
225
  with gr.Row():
226
  with gr.Column(scale=1):
227
- model1_dropdown = gr.Dropdown(open_source_models, label="Model 1")
228
  model1_custom = gr.Textbox(label="Custom Model 1")
229
  with gr.Column(scale=1):
230
- model2_dropdown = gr.Dropdown(open_source_models, label="Model 2")
231
  model2_custom = gr.Textbox(label="Custom Model 2")
232
 
233
  user_input = gr.Textbox(label="Initial prompt", lines=2)
@@ -250,6 +248,7 @@ with gr.Blocks() as demo:
250
 
251
  user_satisfaction_input = gr.Textbox(label="Are you satisfied with the result? (Yes/No)", visible=False)
252
 
 
253
  gr.Markdown("""
254
  ## Button Descriptions
255
  - **Pause**: Temporarily stops the conversation. The current model will finish its response.
@@ -269,8 +268,8 @@ with gr.Blocks() as demo:
269
  chat_output = start_btn.click(
270
  chat,
271
  inputs=[
272
- gr.Dropdown(choices=lambda: [get_model(model1_dropdown.value, model1_custom.value)], value=lambda: get_model(model1_dropdown.value, model1_custom.value)),
273
- gr.Dropdown(choices=lambda: [get_model(model2_dropdown.value, model2_custom.value)], value=lambda: get_model(model2_dropdown.value, model2_custom.value)),
274
  user_input,
275
  chat_history
276
  ],
@@ -290,8 +289,8 @@ with gr.Blocks() as demo:
290
  ).then(
291
  chat,
292
  inputs=[
293
- gr.Dropdown(choices=lambda: [get_model(model1_dropdown.value, model1_custom.value)], value=lambda: get_model(model1_dropdown.value, model1_custom.value)),
294
- gr.Dropdown(choices=lambda: [get_model(model2_dropdown.value, model2_custom.value)], value=lambda: get_model(model2_dropdown.value, model2_custom.value)),
295
  user_input,
296
  chat_history
297
  ],
@@ -302,8 +301,8 @@ with gr.Blocks() as demo:
302
  resume_btn.click(
303
  chat,
304
  inputs=[
305
- gr.Dropdown(choices=lambda: [get_model(model1_dropdown.value, model1_custom.value)], value=lambda: get_model(model1_dropdown.value, model1_custom.value)),
306
- gr.Dropdown(choices=lambda: [get_model(model2_dropdown.value, model2_custom.value)], value=lambda: get_model(model2_dropdown.value, model2_custom.value)),
307
  user_input,
308
  chat_history,
309
  inserted_response
@@ -315,8 +314,8 @@ with gr.Blocks() as demo:
315
  restart_btn.click(
316
  restart_conversation,
317
  inputs=[
318
- gr.Dropdown(choices=lambda: [get_model(model1_dropdown.value, model1_custom.value)], value=lambda: get_model(model1_dropdown.value, model1_custom.value)),
319
- gr.Dropdown(choices=lambda: [get_model(model2_dropdown.value, model2_custom.value)], value=lambda: get_model(model2_dropdown.value, model2_custom.value)),
320
  user_input
321
  ],
322
  outputs=[chat_history, current_response]
@@ -326,3 +325,4 @@ with gr.Blocks() as demo:
326
 
327
  if __name__ == "__main__":
328
  demo.launch()
 
 
82
  manager = ConversationManager()
83
 
84
  def get_model(dropdown, custom):
85
+ model = custom if custom.strip() else dropdown
86
+ return (model, model) # Return a tuple (label, value)
87
 
88
  def chat(model1, model2, user_input, history, inserted_response=""):
89
+ model1 = get_model(model1, model1_custom.value)[0]
90
+ model2 = get_model(model2, model2_custom.value)[0]
91
+
 
 
 
 
92
  if not manager.conversation:
93
  manager.initial_prompt = user_input
94
  manager.clear_conversation()
 
128
  current_model_index = (current_model_index + 1) % 2
129
 
130
  return history, "Conversation completed."
131
+
132
 
133
  models = [model1, model2]
134
  current_model_index = 0 if manager.current_model in ["User", "Model 2"] else 1
 
222
 
223
  with gr.Row():
224
  with gr.Column(scale=1):
225
+ model1_dropdown = gr.Dropdown(choices=open_source_models, label="Model 1")
226
  model1_custom = gr.Textbox(label="Custom Model 1")
227
  with gr.Column(scale=1):
228
+ model2_dropdown = gr.Dropdown(choices=open_source_models, label="Model 2")
229
  model2_custom = gr.Textbox(label="Custom Model 2")
230
 
231
  user_input = gr.Textbox(label="Initial prompt", lines=2)
 
248
 
249
  user_satisfaction_input = gr.Textbox(label="Are you satisfied with the result? (Yes/No)", visible=False)
250
 
251
+
252
  gr.Markdown("""
253
  ## Button Descriptions
254
  - **Pause**: Temporarily stops the conversation. The current model will finish its response.
 
268
  chat_output = start_btn.click(
269
  chat,
270
  inputs=[
271
+ model1_dropdown,
272
+ model2_dropdown,
273
  user_input,
274
  chat_history
275
  ],
 
289
  ).then(
290
  chat,
291
  inputs=[
292
+ model1_dropdown,
293
+ model2_dropdown,
294
  user_input,
295
  chat_history
296
  ],
 
301
  resume_btn.click(
302
  chat,
303
  inputs=[
304
+ model1_dropdown,
305
+ model2_dropdown,
306
  user_input,
307
  chat_history,
308
  inserted_response
 
314
  restart_btn.click(
315
  restart_conversation,
316
  inputs=[
317
+ model1_dropdown,
318
+ model2_dropdown,
319
  user_input
320
  ],
321
  outputs=[chat_history, current_response]
 
325
 
326
  if __name__ == "__main__":
327
  demo.launch()
328
+