Spaces:
Sleeping
Sleeping
Update src/RAGSample.py
Browse files- src/RAGSample.py +2 -7
src/RAGSample.py
CHANGED
@@ -351,19 +351,14 @@ def initialize_biogpt():
|
|
351 |
try:
|
352 |
hf_pipeline = pipeline(
|
353 |
"text-generation",
|
354 |
-
model="microsoft/BioGPT",
|
355 |
-
tokenizer="microsoft/BioGPT",
|
356 |
max_new_tokens=150,
|
357 |
temperature=0.3,
|
358 |
device_map="auto",
|
359 |
-
torch_dtype=torch.float16,
|
360 |
return_full_text=False,
|
361 |
truncation=True,
|
362 |
do_sample=True,
|
363 |
-
|
364 |
-
repetition_penalty=1.1,
|
365 |
-
pad_token_id=1,
|
366 |
-
eos_token_id=2,
|
367 |
)
|
368 |
print("BioGPT loaded successfully!")
|
369 |
return hf_pipeline
|
|
|
351 |
try:
|
352 |
hf_pipeline = pipeline(
|
353 |
"text-generation",
|
354 |
+
model="microsoft/DialoGPT-medium", # No extra dependencies needed
|
|
|
355 |
max_new_tokens=150,
|
356 |
temperature=0.3,
|
357 |
device_map="auto",
|
|
|
358 |
return_full_text=False,
|
359 |
truncation=True,
|
360 |
do_sample=True,
|
361 |
+
pad_token_id=50256, # DialoGPT pad token
|
|
|
|
|
|
|
362 |
)
|
363 |
print("BioGPT loaded successfully!")
|
364 |
return hf_pipeline
|