import os
import json

from tools import TOOLS
from mistral_hf_wrapper import MistralInference

# Endpoint configuration is read from the environment.
API_URL = os.getenv("HF_MISTRAL_ENDPOINT")
API_TOKEN = os.getenv("HF_TOKEN")

mistral = MistralInference(api_url=API_URL, api_token=API_TOKEN)
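# The wrapper appears to target a dedicated Hugging Face Inference Endpoint.
# Illustrative setup (placeholder values, not real credentials):
#   export HF_MISTRAL_ENDPOINT="https://<your-endpoint>.endpoints.huggingface.cloud"
#   export HF_TOKEN="hf_..."
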
def load_tasks():
    """Read benchmark tasks from metadata.jsonl, one JSON object per line."""
    tasks = []
    with open("metadata.jsonl", "r", encoding="utf-8") as f:
        for line in f:
            item = json.loads(line)
            task_id = item.get("task_id")
            # The source data capitalizes the key as "Question"; fall back to
            # lowercase in case the file was normalized.
            question = item.get("Question") or item.get("question")
            if task_id and question:
                tasks.append({
                    "task_id": task_id,
                    "question": question,
                })
    return tasks
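
# A line in metadata.jsonl is expected to look roughly like this (illustrative,
# inferred only from the keys the loader reads):
#   {"task_id": "<uuid>", "Question": "...", ...}
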
def solve_task(task, tools=TOOLS):
    """Send a single task to the model and return its answer record."""
    # Note: `tools` is accepted but not consulted by this single-shot solver.
    system_prompt = (
        "You are a helpful agent. Use reasoning, tools if needed, and return the answer only."
    )
    user_prompt = task["question"]
    # Llama-2-style instruct template with <<SYS>> system tags.
    prompt = f"<s>[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n{user_prompt} [/INST]"
    response = mistral.run(prompt)
    return {
        "task_id": task["task_id"],
        "question": task["question"],
        "submitted_answer": response.strip() if response else "ERROR: Empty model response",
    }
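
# Minimal entry-point sketch (not part of the original file): solve every task
# and write one JSON object per line. The answers.jsonl filename is an
# assumption for illustration, not confirmed by the source.
if __name__ == "__main__":
    results = [solve_task(task) for task in load_tasks()]
    with open("answers.jsonl", "w", encoding="utf-8") as f:
        for result in results:
            f.write(json.dumps(result) + "\n")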