Update app.py
app.py
CHANGED
@@ -50,14 +50,17 @@ class HfModelWrapper:
         streamer = TextIteratorStreamer(self.tokenizer, skip_prompt=True)#skip_special_tokens=True)
 
         gen_kwargs = {
-            "
+            "input_ids": input_ids,
             "streamer": streamer,
-            "
-            "max_length": 32768,
-            "temperature": 0.1,
-            "top_p": 0.9,
-            #"repetition_penalty": 1.1,
+            "max_new_tokens": 2048,
             "min_length": 10,
+            "temperature": 0.7,
+            "top_p": 0.9,
+            "top_k": 50,
+            "repetition_penalty": 1.1,
+            "do_sample": True,
+            "eos_token_id": tokenizer.eos_token_id,
+            "pad_token_id": tokenizer.pad_token_id,
         }
 
         thread = Thread(target=self.model.generate, kwargs=gen_kwargs)
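The hunk wires generate() into a TextIteratorStreamer and a background Thread but does not show how the stream is consumed. Below is a minimal sketch of the surrounding pattern, not part of the commit: the constructor, the stream_chat method name, the prompt handling, and the use of self.tokenizer for the eos/pad ids (the diff references a bare tokenizer) are assumptions; the gen_kwargs mirror the new side of the diff.

# Sketch only: reconstructs the likely context of the hunk above.
# Assumed names: stream_chat, model_name, prompt. gen_kwargs copied from the diff.
from threading import Thread

from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer


class HfModelWrapper:
    def __init__(self, model_name: str):
        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
        self.model = AutoModelForCausalLM.from_pretrained(model_name)

    def stream_chat(self, prompt: str):
        input_ids = self.tokenizer(prompt, return_tensors="pt").input_ids

        streamer = TextIteratorStreamer(self.tokenizer, skip_prompt=True)

        gen_kwargs = {
            "input_ids": input_ids,
            "streamer": streamer,
            "max_new_tokens": 2048,
            "min_length": 10,
            "temperature": 0.7,
            "top_p": 0.9,
            "top_k": 50,
            "repetition_penalty": 1.1,
            "do_sample": True,
            "eos_token_id": self.tokenizer.eos_token_id,
            "pad_token_id": self.tokenizer.pad_token_id,
        }

        # generate() runs in a background thread so the streamer can be
        # consumed incrementally on the calling thread (e.g. by a Gradio UI).
        thread = Thread(target=self.model.generate, kwargs=gen_kwargs)
        thread.start()

        partial = ""
        for new_text in streamer:  # yields decoded text chunks as they are produced
            partial += new_text
            yield partial
        thread.join()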