Update src/RAGSample.py
Browse files — src/RAGSample.py (+16 −1), CHANGED
@@ -392,7 +392,22 @@ Answer:
 )
 
 # Initialize a local Hugging Face model
-hf_pipeline =
+hf_pipeline = pipeline(
+    "text-generation",
+    model="microsoft/BioGPT",
+    tokenizer="microsoft/BioGPT",
+    max_new_tokens=150,
+    temperature=0.3,
+    device_map="auto",
+    torch_dtype=torch.float16,
+    return_full_text=False,
+    truncation=True,
+    do_sample=True,
+    top_p=0.9,
+    repetition_penalty=1.1,
+    pad_token_id=1,
+    eos_token_id=2,
+)
 
 # Wrap it in LangChain
 llm = HuggingFacePipeline(pipeline=hf_pipeline)