mouryachinta committed on
Commit 0027941 · verified · 1 Parent(s): afddbb2

Update app.py

Files changed (1)
  1. app.py +10 -16
app.py CHANGED
@@ -99,16 +99,17 @@
 # iface.launch()
 
 
+
+
 import gradio as gr
 from pinecone import Pinecone
 from sentence_transformers import SentenceTransformer
 from openai import AzureOpenAI
-from huggingface_hub import login as hf_login
 import cohere
 import os
 from dotenv import load_dotenv
 
-load_dotenv() # Load keys from .env file
+load_dotenv()
 
 # === ENVIRONMENT VARIABLES ===
 AZURE_OPENAI_KEY = os.getenv("AZURE_OPENAI_KEY")
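Note: load_dotenv() now runs without its explanatory comment, so the keys it is expected to supply are only visible further down in the env-var block. A minimal startup-check sketch, assuming a local .env file; AZURE_OPENAI_KEY and AZURE_OPENAI_ENDPOINT appear in this diff, while the Pinecone and Cohere variable names are assumptions:

import os
from dotenv import load_dotenv

load_dotenv()  # reads key=value pairs from .env into the process environment

# Fail fast if any key the app reads via os.getenv() is missing.
required = ["AZURE_OPENAI_KEY", "AZURE_OPENAI_ENDPOINT", "PINECONE_API_KEY", "COHERE_API_KEY"]
missing = [name for name in required if not os.getenv(name)]
if missing:
    raise RuntimeError(f"Missing environment variables: {', '.join(missing)}")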
@@ -138,6 +139,7 @@ llm_client = AzureOpenAI(
     azure_endpoint=AZURE_OPENAI_ENDPOINT
 )
 
+# === Core Function ===
 def run_rag_query(query: str, model_choice: str) -> str:
     if model_choice not in EMBED_INDEXES:
         return f"Invalid model selected. Choose from {list(EMBED_INDEXES.keys())}"
@@ -177,7 +179,7 @@ Answer:"""
     )
     answer = response.choices[0].message.content
 
-    # Include source in retrieved chunks
+    # Add source to results
     top_matches = "\n\n".join([
         f"**Rank {i+1}:**\n"
         f"**Source:** {m.metadata.get('source', 'N/A')}\n"
@@ -187,12 +189,13 @@ Answer:"""
 
     return f"### ✅ Answer:\n{answer}\n\n---\n### 📄 Top Retrieved Chunks:\n{top_matches}"
 
-# Gradio UI
+
+# === Gradio UI ===
 with gr.Blocks(title="QWEN vs COHERE RAG App") as iface:
     gr.Markdown("## 📚 QWEN vs COHERE RAG App")
     gr.Markdown("Ask a question and retrieve contextual answers from your embedded documents.\n"
                 "[📁 View PDF Files](https://drive.google.com/drive/folders/1fq-PyNptFg20cknkzNrmW6Tev-869RY9?usp=sharing)")
-
+
     with gr.Row():
         query = gr.Textbox(label="Enter your query", lines=2, scale=3)
         model_choice = gr.Radio(["cohere", "qwen"], label="Choose embedding model", scale=1)
@@ -201,17 +204,8 @@ with gr.Blocks(title="QWEN vs COHERE RAG App") as iface:
 
     submit_btn = gr.Button("🔍 Run Query")
 
-    # Add spinner and action
-    with gr.Row():
-        status = gr.Markdown("")
-
-    def wrapped_run(query, model_choice):
-        status.update("⏳ Running... please wait")
-        result = run_rag_query(query, model_choice)
-        status.update("✅ Done")
-        return result
-
-    submit_btn.click(fn=wrapped_run, inputs=[query, model_choice], outputs=output)
+    # Spinner: Gradio shows spinner automatically while fn= is running
+    submit_btn.click(fn=run_rag_query, inputs=[query, model_choice], outputs=output)
 
 if __name__ == "__main__":
     iface.launch()
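Note: the deleted wrapped_run called status.update() inside the click handler, which does not push a change to the page (recent Gradio versions have no component .update() method at all), so this commit falls back to the spinner Gradio shows automatically while fn= is running. If an explicit status line is still wanted, one possible alternative, not part of this commit, is a generator handler that streams an interim message to the same output before the final answer replaces it:

# Sketch only: Gradio treats a generator fn as a streaming handler and
# re-renders the outputs on every yield.
def run_with_status(query, model_choice):
    yield "⏳ Running... please wait"          # shown while the query executes
    yield run_rag_query(query, model_choice)   # final answer overwrites the status

submit_btn.click(fn=run_with_status, inputs=[query, model_choice], outputs=output)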