Update agent.py
agent.py CHANGED
@@ -195,6 +195,10 @@ def initialize_models(use_api_mode=False):
         for token in response.text:
             yield CompletionResponse(text=token, delta=token)
 
+
+        embed_model = HuggingFaceEmbedding(
+            model_name="nomic-ai/nomic-embed-multimodal-3b",
+            device="cuda:1")
         proj_llm = QwenVL7BCustomLLM()
 
         # Code LLM
@@ -205,11 +209,6 @@ def initialize_models(use_api_mode=False):
             model_kwargs={"torch_dtype": "auto"},
             generate_kwargs={"do_sample": False}
         )
-
-        # Embedding model
-        embed_model = HuggingFaceEmbedding(
-            model_name="nomic-ai/colnomic-embed-multimodal-3b",
-            device="cuda:1")
 
         return proj_llm, code_llm, embed_model
     except Exception as e:
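For orientation, here is a minimal sketch of how initialize_models() reads after this commit: the embedding model is now constructed before the projection LLM (and the checkpoint name drops the "col" prefix), instead of after the code LLM as in the removed block. Only the lines visible in the hunks come from agent.py; the stub QwenVL7BCustomLLM class, the code_llm placeholder, and the except body are illustrative assumptions, not the Space's actual code.

# Sketch only -- surrounding structure inferred from the diff context lines.
from llama_index.embeddings.huggingface import HuggingFaceEmbedding


class QwenVL7BCustomLLM:
    """Stub standing in for the custom Qwen-VL 7B wrapper defined in agent.py."""


def initialize_models(use_api_mode=False):
    try:
        # Moved up by this commit: build the embedding model first, pinned to
        # the second GPU.
        embed_model = HuggingFaceEmbedding(
            model_name="nomic-ai/nomic-embed-multimodal-3b",
            device="cuda:1")
        proj_llm = QwenVL7BCustomLLM()

        # Code LLM construction is elided in the hunks; only its trailing
        # arguments are visible as context:
        #     model_kwargs={"torch_dtype": "auto"},
        #     generate_kwargs={"do_sample": False}
        # )
        code_llm = None  # placeholder for the elided code-LLM object

        return proj_llm, code_llm, embed_model
    except Exception as e:
        raise  # exception-handling body is not part of the diff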