context-AI / app.py
import gradio as gr
from transformers import pipeline
# Initialize the question-answering pipeline
qa_pipeline = pipeline("question-answering")
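# NOTE: with no model name given, transformers falls back to its default
# extractive QA checkpoint (currently distilbert-base-cased-distilled-squad)
# and logs a warning; pinning the model explicitly avoids surprises, e.g.:
# qa_pipeline = pipeline("question-answering", model="distilbert-base-cased-distilled-squad")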
def answer_question(context, question):
    """
    Run the QA pipeline and get the answer.

    :param context: The context in which the question is asked.
    :param question: The question that needs to be answered.
    :return: Answer extracted from the context by the QA model.
    """
    result = qa_pipeline({'context': context, 'question': question})
    return result['answer']
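# Illustrative usage (not wired into the app); the exact span returned
# depends on the underlying model:
# answer_question(
#     "The Eiffel Tower is located in Paris, France.",
#     "Where is the Eiffel Tower?",
# )  # -> e.g. "Paris, France"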
# Create the Gradio interface
interface = gr.Interface(
    fn=answer_question,
    inputs=[
        gr.Textbox(label="Context", placeholder="Enter context here...", lines=4),
        gr.Textbox(label="Question", placeholder="Ask a question here...", lines=1),
    ],
    outputs="text",  # Output the answer as plain text
    live=True,       # Optional: update the answer as the user types
    # Note: the old layout="horizontal" argument is no longer supported in
    # Gradio 3+; see the gr.Blocks sketch below for side-by-side inputs.
)
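# If a true side-by-side layout is wanted (what layout="horizontal" used to do),
# a gr.Blocks arrangement like the following could replace the Interface above
# (illustrative sketch only, not used by this app):
#
#     with gr.Blocks() as demo:
#         with gr.Row():
#             context_box = gr.Textbox(label="Context", lines=4)
#             question_box = gr.Textbox(label="Question", lines=1)
#         answer_box = gr.Textbox(label="Answer")
#         question_box.change(answer_question, [context_box, question_box], answer_box)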
# Launch the interface
if __name__ == "__main__":
    interface.launch()