# smart-lms-suite / utils / quiz_offline.py
# Offline MCQ/question generation via a local seq2seq model (e.g. T5).
# Expects module-level `tokenizer` and `model` to be defined elsewhere in this file.
def generate_mcqs(text, num_questions=1):
    """Generate quiz questions from *text* with the module-level seq2seq model.

    Uses the module-level ``tokenizer`` and ``model`` globals (defined
    elsewhere in this file — presumably a T5-style model, given the
    ``"generate question:"`` task prefix; confirm against the loader).

    Args:
        text: Source passage to generate questions from. Truncated to the
            model's 512-token input limit.
        num_questions: Number of questions to generate (default 1).

    Returns:
        A single newline-joined, 1-indexed numbered string of questions,
        suitable for direct display in a Gradio text output.
    """
    input_text = f"generate question: {text}"
    input_ids = tokenizer.encode(
        input_text, return_tensors="pt", max_length=512, truncation=True
    )
    questions = []
    for _ in range(num_questions):
        # do_sample=True is required for `temperature` to have any effect;
        # without it generate() is greedy and every iteration would return
        # the exact same question, making num_questions > 1 useless.
        output = model.generate(
            input_ids=input_ids,
            max_length=128,
            num_return_sequences=1,
            do_sample=True,
            temperature=0.7,
        )
        decoded = tokenizer.decode(output[0], skip_special_tokens=True)
        questions.append(decoded.strip())
    # Join all questions into one numbered string for Gradio output.
    return "\n".join(f"{i+1}. {q}" for i, q in enumerate(questions))