rahul7star committed on
Commit
7b87d32
·
verified ·
1 Parent(s): e682860

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -23
app.py CHANGED
@@ -1,33 +1,15 @@
1
  import gradio as gr
2
  from transformers import pipeline
3
 
4
- # Specify the model explicitly
5
- model_name = "rahul7star/Rahul-FineTunedLLM-v03"
6
- qa_pipeline = pipeline("question-answering", model=model_name)
7
 
8
  def answer_question(context, question):
9
- """
10
- Function to run the QA pipeline and get the answer.
11
-
12
- :param context: The context in which the question is asked.
13
- :param question: The question that needs to be answered.
14
- :return: Answer from the QA model.
15
- """
16
  result = qa_pipeline({'context': context, 'question': question})
17
  return result['answer']
18
 
19
- # Create the Gradio interface
20
- interface = gr.Interface(
21
- fn=answer_question,
22
- inputs=[
23
- gr.Textbox(label="Context", placeholder="Enter context here...", lines=4), # Left-side input
24
- gr.Textbox(label="Question", placeholder="Ask a question here...", lines=1) # Right-side input
25
- ],
26
- outputs="text", # Output as text
27
- live=True, # Optional: Display results as the user types
28
- layout="horizontal" # Align inputs side by side
29
- )
30
 
31
  # Launch the interface
32
- if __name__ == "__main__":
33
- interface.launch()
 
1
  import gradio as gr
2
  from transformers import pipeline
3
 
4
+ # Initialize the custom pipeline
5
+ qa_pipeline = pipeline("question-answering")
 
6
 
7
  def answer_question(context, question):
 
 
 
 
 
 
 
8
  result = qa_pipeline({'context': context, 'question': question})
9
  return result['answer']
10
 
11
+ # Define the Gradio interface
12
+ interface = gr.Interface(fn=answer_question, inputs=["text", "text"], outputs="text")
 
 
 
 
 
 
 
 
 
13
 
14
  # Launch the interface
15
+ interface.launch()