surkovvv committed
Commit e4978be · 1 Parent(s): 6757728

new message format

Files changed (1):
  app.py +2 -2
app.py CHANGED
@@ -25,8 +25,8 @@ def predict(message, history):
     # messages = "".join(["".join(["<|start_header_id|>user<|end_header_id|>\n"+item[0],
     #                              "<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n"+item[1]])
     #                     for item in history_transformer_format])
-    messages = "".join(["".join(["\n<human>:"+item[0], "\n<bot>:"+item[1]])
-                        for item in history_transformer_format])
+    messages = [{"role": item[0], "user": item[1]} for item in history_transformer_format]
+    print(messages)
 
     # model_inputs = tokenizer([messages], return_tensors="pt")  # .to("cuda")
     model_inputs = tokenizer.apply_chat_template(messages, return_tensors="pt")
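For reference, recent transformers releases expect apply_chat_template to receive a list of dicts with "role" and "content" keys (roles are typically "user" and "assistant"). Below is a minimal sketch of building such a list from Gradio-style (user, bot) history pairs; the model id and the build_messages helper are illustrative assumptions, not taken from this Space's code.

# Minimal sketch (assumptions: history holds (user_text, bot_text) pairs as in Gradio
# chat history; the model id below is a placeholder, the Space's actual model is not
# shown in this diff).
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B-Instruct")

def build_messages(history, message):
    # apply_chat_template expects {"role": ..., "content": ...} dicts.
    messages = []
    for user_text, bot_text in history:
        messages.append({"role": "user", "content": user_text})
        if bot_text:
            messages.append({"role": "assistant", "content": bot_text})
    messages.append({"role": "user", "content": message})
    return messages

model_inputs = tokenizer.apply_chat_template(
    build_messages([("Hi", "Hello! How can I help?")], "What is 2 + 2?"),
    add_generation_prompt=True,  # append the assistant header so the model answers as the bot
    return_tensors="pt",
)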