File size: 698 Bytes
48966e1 b88f342 48966e1 e2224e6 48966e1 b88f342 48966e1 e2224e6 48966e1 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 |
def generate_mcqs(text, num_questions=1):
    """Generate question(s) from *text* with a seq2seq model.

    Relies on module-level ``tokenizer`` and ``model`` (Hugging Face
    transformers objects) defined elsewhere in this file.

    Args:
        text: Source passage to generate questions from.
        num_questions: Number of questions to generate (default 1).

    Returns:
        One newline-joined string of numbered questions
        (``"1. ...\n2. ..."``), suitable for a Gradio output component.
    """
    input_text = f"generate question: {text}"
    # Truncate long passages to the model's 512-token context window.
    input_ids = tokenizer.encode(
        input_text, return_tensors="pt", max_length=512, truncation=True
    )
    questions = []
    for _ in range(num_questions):
        # BUG FIX: the original passed temperature=0.7 without
        # do_sample=True, so decoding stayed greedy/deterministic and
        # every loop iteration produced the *same* question. Sampling
        # must be enabled for temperature to have any effect.
        output = model.generate(
            input_ids=input_ids,
            max_length=128,
            num_return_sequences=1,
            do_sample=True,  # required for temperature to take effect
            temperature=0.7,
        )
        decoded = tokenizer.decode(output[0], skip_special_tokens=True)
        questions.append(decoded.strip())
    # Join all questions into one numbered string for Gradio output.
    return "\n".join(f"{i + 1}. {q}" for i, q in enumerate(questions))
|