Update src/RAGSample.py
src/RAGSample.py CHANGED (+11, -6)
@@ -352,15 +352,20 @@ def initialize_biogpt():
     try:
         hf_pipeline = pipeline(
             "text-generation",
-            model="microsoft/
-
-
+            model="microsoft/BioGPT",
+            tokenizer="microsoft/BioGPT",
+            max_new_tokens=100,
+            # Remove or comment out max_length to avoid truncation
+            # max_length=1024,
+            temperature=0.2,
             device_map="auto",
+            torch_dtype=torch.float16,
             return_full_text=False,
-            truncation=
+            truncation=False,  # Explicitly disable truncation
             do_sample=True,
-            pad_token_id=
-
+            pad_token_id=1,
+            eos_token_id=2,
+        )
         print("BioGPT loaded successfully!")
         return hf_pipeline
     except Exception as e:
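
Why these values work: BioGPT's tokenizer defines `<pad>` as id 1 and `</s>` as id 2, so the hard-coded `pad_token_id=1` / `eos_token_id=2` match the model's own special tokens, and `max_new_tokens=100` bounds only the generated continuation, unlike `max_length`, which counts the prompt toward the limit and can silently truncate long RAG contexts. Below is a minimal sketch of how the function reads after this commit; the imports and the body of the except branch are not shown in the diff, so they are assumptions here, not part of the change.

    import torch
    from transformers import pipeline

    def initialize_biogpt():
        """Build a BioGPT text-generation pipeline (reconstructed sketch)."""
        try:
            hf_pipeline = pipeline(
                "text-generation",
                model="microsoft/BioGPT",    # hub id as written in the commit
                tokenizer="microsoft/BioGPT",
                max_new_tokens=100,          # cap the continuation, not prompt+output
                temperature=0.2,             # low temperature: near-deterministic sampling
                device_map="auto",
                torch_dtype=torch.float16,
                return_full_text=False,      # return only the generated continuation
                truncation=False,            # explicitly disable input truncation
                do_sample=True,
                pad_token_id=1,              # BioGPT's <pad>
                eos_token_id=2,              # BioGPT's </s>
            )
            print("BioGPT loaded successfully!")
            return hf_pipeline
        except Exception as e:
            # Assumed handler: the diff cuts off inside this branch.
            print(f"Failed to load BioGPT: {e}")
            return None

A quick smoke test, assuming the sketch above:

    generator = initialize_biogpt()
    if generator is not None:
        print(generator("Metformin is a first-line treatment for")[0]["generated_text"])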