Update app.py
Browse files
app.py
CHANGED
@@ -13,6 +13,13 @@ from huggingface_hub import InferenceClient, hf_hub_download
|
|
13 |
# --- Constants ---
|
14 |
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
|
15 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
16 |
@spaces.GPU
|
17 |
def load_llm(hf_token):
|
18 |
model_id = "meta-llama/Llama-3.1-8B-Instruct"
|
@@ -72,7 +79,7 @@ class BasicAgent:
|
|
72 |
|
73 |
# Initialize the agent
|
74 |
pipe = load_llm(hf_token = os.environ["HF_TOKEN"])
|
75 |
-
self.pipe =
|
76 |
self.agent = CodeAgent(llm=self.pipe, tools=[], add_base_tools=True)
|
77 |
|
78 |
# self.agent = CodeAgent(
|
|
|
13 |
# --- Constants ---
|
14 |
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
|
15 |
|
16 |
+
class LocalLLM:
    """Thin adapter around a transformers text-generation pipeline.

    Wraps a pipeline callable so it exposes the ``generate(prompt)``
    interface expected by the agent (used as ``CodeAgent(llm=...)``
    elsewhere in this file).
    """

    def __init__(self, pipe):
        # pipe: a callable that, given a prompt, returns the
        # transformers text-generation shape [{"generated_text": ...}]
        # — assumed from the indexing in generate(); confirm with load_llm.
        self.pipe = pipe

    def generate(self, prompt, **kwargs):
        """Run the pipeline on *prompt* and return the generated text.

        Fix: forward **kwargs to the pipeline call. The original accepted
        keyword arguments but silently discarded them, so options such as
        max_new_tokens or temperature had no effect. Calls without kwargs
        behave exactly as before.
        """
        return self.pipe(prompt, **kwargs)[0]["generated_text"]
|
22 |
+
|
23 |
@spaces.GPU
|
24 |
def load_llm(hf_token):
|
25 |
model_id = "meta-llama/Llama-3.1-8B-Instruct"
|
|
|
79 |
|
80 |
# Initialize the agent
|
81 |
pipe = load_llm(hf_token = os.environ["HF_TOKEN"])
|
82 |
+
self.pipe = LocalLLM(pipe)
|
83 |
self.agent = CodeAgent(llm=self.pipe, tools=[], add_base_tools=True)
|
84 |
|
85 |
# self.agent = CodeAgent(
|