import os
import json

from tools import TOOLS
from metadata import load_metadata
from mistral_hf_wrapper import MistralInference

# Hugging Face Inference Endpoint configuration is read from the environment.
API_URL = os.getenv("HF_MISTRAL_ENDPOINT")
API_TOKEN = os.getenv("HF_TOKEN")

mistral = MistralInference(api_url=API_URL, api_token=API_TOKEN)


def load_tasks():
    """Read metadata.jsonl and return the tasks that have both a task_id and a question."""
    tasks = []
    with open("metadata.jsonl", "r", encoding="utf-8") as f:
        for line in f:
            item = json.loads(line)
            task_id = item.get("task_id")
            # The question key may be capitalized or lowercase depending on the source file.
            question = item.get("Question") or item.get("question")
            if task_id and question:
                tasks.append({
                    "task_id": task_id,
                    "question": question,
                })
    return tasks
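
# Illustrative example of a metadata.jsonl line that load_tasks() expects.
# The values below are hypothetical; the real file must sit next to this script:
#   {"task_id": "task_001", "Question": "What is the capital of France?"}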

def solve_task(task, tools=TOOLS):
    """Wrap the task's question in an instruction prompt, query Mistral, and return the answer."""
    system_prompt = (
        "You are a helpful agent. Use reasoning, tools if needed, and return the answer only."
    )
    user_prompt = task["question"]
    prompt = f"<s>[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n{user_prompt} [/INST]"
    response = mistral.run(prompt)
    return {
        "task_id": task["task_id"],
        "question": task["question"],
        "submitted_answer": response.strip() if response else "ERROR: Empty model response",
    }
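
# A minimal entry-point sketch, assuming this script is run directly and that
# writing the collected answers to "answers.jsonl" (an assumed filename, not
# from the original code) is the desired output. Adapt to the Space's actual
# submission flow if it differs.
if __name__ == "__main__":
    results = []
    for task in load_tasks():
        result = solve_task(task)
        results.append(result)
        print(f"{result['task_id']}: {result['submitted_answer']}")

    # Persist all answers as JSON Lines for later inspection or submission.
    with open("answers.jsonl", "w", encoding="utf-8") as out:
        for result in results:
            out.write(json.dumps(result, ensure_ascii=False) + "\n")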