YuvrajSingh9886 committed on
Commit e06d1bb · verified · 1 parent: f10d514

Made changes to the prompt

Files changed (1): app.py (+7 −0)
app.py CHANGED
@@ -28,6 +28,13 @@ model.eval()
 
 
 def answer_question(prompt, temperature, top_k, max_length):
+
+    prompt = f'''
+    ### Instruction: You are a helpful AI Assistant for question answering.
+    ### Input : {prompt}
+
+    ### Response:
+    '''
     with torch.no_grad():
         generated_text = topk_sampling(model, prompt, max_length=max_length, top_k=top_k, temperature=temperature, device=ModelArgs.device)
         # print("Gnerated: ", generated_text)
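
For context, the generation call keeps the signature topk_sampling(model, prompt, max_length=..., top_k=..., temperature=..., device=...). The sketch below is a hypothetical, minimal top-k sampling loop matching that signature, not the implementation in this repo; it assumes a module-level `tokenizer` and a `model` that returns raw next-token logits of shape (batch, seq_len, vocab_size).

import torch
import torch.nn.functional as F

# Hypothetical sketch only: `tokenizer` is assumed to exist at module level,
# and `model(input_ids)` is assumed to return logits of shape (B, T, vocab).
def topk_sampling(model, prompt, max_length=256, top_k=50, temperature=1.0, device="cpu"):
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(device)
    for _ in range(max_length):
        logits = model(input_ids)[:, -1, :] / max(temperature, 1e-8)   # last-position logits, temperature-scaled
        top_vals, top_idx = torch.topk(logits, k=top_k, dim=-1)        # keep the k most likely tokens
        probs = F.softmax(top_vals, dim=-1)                            # renormalise over the top-k
        next_id = top_idx.gather(-1, torch.multinomial(probs, 1))      # sample one token id from the top-k
        input_ids = torch.cat([input_ids, next_id], dim=-1)
        if tokenizer.eos_token_id is not None and next_id.item() == tokenizer.eos_token_id:
            break                                                      # stop at end-of-sequence
    return tokenizer.decode(input_ids[0], skip_special_tokens=True)

Note that because the new template is prepended inside answer_question, the decoded text may still contain the instruction/input scaffolding depending on how topk_sampling decodes; if so, answer_question could strip everything up to "### Response:" before returning the answer.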