tr3n1ttty committed
Commit 44d76ba
1 Parent(s): 632b373

some debug messages

Files changed (1)
  app.py +2 -0
app.py CHANGED
@@ -24,8 +24,10 @@ def predict(message, history):
 
     messages = "".join(["".join(["\n<human>:"+item[0], "\n<bot>:"+item[1]])
                         for item in history_transformer_format])
+    print("MESSAGES: ", messages)
 
     model_inputs = tokenizer([messages], return_tensors="pt")  # .to("cuda")
+    print("MODEL INPUT:\n", model_inputs)
     streamer = TextIteratorStreamer(tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True)
     generate_kwargs = dict(
         model_inputs,
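
For orientation, below is a minimal, self-contained sketch of the kind of streaming predict function this hunk appears to sit in, following the common Gradio ChatInterface + TextIteratorStreamer pattern. Only the lines visible in the hunk (the prompt join, the tokenizer call, the streamer, the generate_kwargs dict, and the two print calls added by this commit) come from the diff; the model name, the history_transformer_format construction, the generation parameters, and the ChatInterface launch are assumptions for illustration, not taken from this repository.

    # Minimal sketch, assuming the standard Gradio + Transformers streaming setup.
    # MODEL_ID, max_new_tokens, do_sample, and the ChatInterface wiring are assumptions.
    from threading import Thread

    import gradio as gr
    from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

    MODEL_ID = "togethercomputer/RedPajama-INCITE-Chat-3B-v1"  # assumption: any causal LM works here
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    model = AutoModelForCausalLM.from_pretrained(MODEL_ID)     # .to("cuda") if a GPU is available

    def predict(message, history):
        # history is a list of [user, bot] pairs; append the new turn with an empty bot slot (assumption)
        history_transformer_format = history + [[message, ""]]

        # Flatten the chat history into the <human>/<bot> prompt format seen in the hunk
        messages = "".join(["".join(["\n<human>:" + item[0], "\n<bot>:" + item[1]])
                            for item in history_transformer_format])
        print("MESSAGES: ", messages)          # debug output added in this commit

        model_inputs = tokenizer([messages], return_tensors="pt")  # .to("cuda")
        print("MODEL INPUT:\n", model_inputs)  # debug output added in this commit

        # Stream tokens back to the UI as they are generated
        streamer = TextIteratorStreamer(tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True)
        generate_kwargs = dict(
            model_inputs,
            streamer=streamer,
            max_new_tokens=512,  # assumption: not visible in the hunk
            do_sample=True,      # assumption: not visible in the hunk
        )
        Thread(target=model.generate, kwargs=generate_kwargs).start()

        partial_message = ""
        for new_token in streamer:
            partial_message += new_token
            yield partial_message

    gr.ChatInterface(predict).launch()  # assumption: typical way such a Space exposes predict()

With the two added print calls, the flattened prompt and the tokenized model inputs are written to the Space's container logs on every request, which is what makes them useful as debug messages.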