Update src/RAGSample.py
src/RAGSample.py  (+6 -6)
@@ -18,6 +18,10 @@ import pandas as pd
 from typing import Optional, List
 import re
 
+# OPTION 1: Use Hugging Face Pipeline (Recommended for HF Spaces)
+from transformers import pipeline
+from langchain.llms import HuggingFacePipeline
+
 # Disable ChromaDB telemetry to avoid the error
 os.environ["ANONYMIZED_TELEMETRY"] = "False"
 os.environ["CHROMA_SERVER_HOST"] = "localhost"
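These telemetry settings only take effect if they run before chromadb reads its configuration, so their position near the top of the file matters. A minimal sketch of the intended ordering (the chromadb import is an assumption; the diff does not show where the client is created):

import os

# Set before any chromadb import/client creation so the settings are picked up.
os.environ["ANONYMIZED_TELEMETRY"] = "False"
os.environ["CHROMA_SERVER_HOST"] = "localhost"

import chromadb  # noqa: E402  -- deliberately imported after the env setup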
@@ -357,16 +361,12 @@ Answer:
     """,
     input_variables=["question", "documents"],
 )
-
-# OPTION 1: Use Hugging Face Pipeline (Recommended for HF Spaces)
-from transformers import pipeline
-from langchain.llms import HuggingFacePipeline
 
 # Initialize a local Hugging Face model
 hf_pipeline = pipeline(
     "text-generation",
-    model="
-    tokenizer="
+    model="deepseek-ai/DeepSeek-R1-0528-Qwen3-8B",  # Good for Q&A tasks
+    tokenizer="deepseek-ai/DeepSeek-R1-0528-Qwen3-8B",
     max_length=512,
     temperature=0.1,
     device=0 if torch.cuda.is_available() else -1,
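Taken together, the commit moves the pipeline imports to module level and pins the model to deepseek-ai/DeepSeek-R1-0528-Qwen3-8B. A minimal sketch of how the resulting pieces typically wire together in LangChain (the LLMChain wrapper and the shortened prompt text are assumptions; RAGSample.py defines its own prompt around line 357):

import torch
from transformers import pipeline
from langchain.llms import HuggingFacePipeline
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

# Local text-generation pipeline, on GPU if available (mirrors the diff).
hf_pipeline = pipeline(
    "text-generation",
    model="deepseek-ai/DeepSeek-R1-0528-Qwen3-8B",
    tokenizer="deepseek-ai/DeepSeek-R1-0528-Qwen3-8B",
    max_length=512,
    temperature=0.1,
    device=0 if torch.cuda.is_available() else -1,
)
llm = HuggingFacePipeline(pipeline=hf_pipeline)

# Stand-in for the RAG prompt defined earlier in the file (hypothetical text).
prompt = PromptTemplate(
    template="Use the documents to answer.\n{documents}\nQuestion: {question}\nAnswer:",
    input_variables=["question", "documents"],
)

chain = LLMChain(llm=llm, prompt=prompt)
answer = chain.run(question="What changed?", documents="...")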