Update app.py
app.py CHANGED
@@ -104,7 +104,7 @@ class BasicModel:
         # This serialization might need adjustment based on how CodeAgent sends prompts.
         # It needs to handle both initial strings and potential chat histories (as a list of ChatMessage objects).
         if isinstance(messages, str):
-            return messages
+            return f"<|start_header_id|>user<|end_header_id|>\n\n{messages}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"
 
         prompt = []
         for m in messages: ## <- For each ChatMessage object
@@ -125,7 +125,6 @@ class BasicModel:
         return "".join(prompt)
 
     def generate(self, prompt: str | list, stop_sequences=None, **kwargs) -> str: ## <- 'prompt' is either a string or a list of ChatMessages
-        print(f"Agent received question (first 50 chars): {question[:50]}...")
         # 1. Build the HF kwargs
         allowed = {"max_new_tokens", "temperature", "top_k", "top_p"}
         gen_kwargs = {k: v for k, v in kwargs.items() if k in allowed}