from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
import json
from evaluator import evaluate_model
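
# evaluate_model is assumed to look roughly like this (illustrative sketch,
# not the actual implementation in evaluator.py):
#
#   def evaluate_model(model_name: str, dataset_name: str, split: str) -> dict:
#       """Run the benchmark and return a dict mapping metric names to scores."""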

app = FastAPI()
DB_PATH = "models_results.json"

# CORS middleware so the React frontend can call the API from another origin.
# Note: allow_origins=["*"] is permissive and intended for development only.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

class ModelIn(BaseModel):
    model_name: str

def load_results():
    """Return the stored results, or an empty list if the file is missing or empty."""
    try:
        with open(DB_PATH, "r") as f:
            return json.load(f)
    except (FileNotFoundError, json.JSONDecodeError):
        return []

@app.get("/results")
def get_results():
    return load_results()
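
# Example request (illustrative, assuming the server runs on localhost:8000):
#   curl http://localhost:8000/results
# -> [] on a fresh install, or a list of {"model": ..., "metrics": ...} records.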

@app.post("/evaluate")
def eval_and_store(req: ModelIn):
    model_name = req.model_name
    # 1. Skip models that have already been evaluated
    data = load_results()
    if any(d["model"] == model_name for d in data):
        raise HTTPException(status_code=400, detail="Model already evaluated")
    # 2. Run evaluation
    try:
        metrics = evaluate_model(
            model_name=model_name,        # any 🤗 Hub model ID
            dataset_name="sunbird/salt",  # evaluation dataset
            split="dev",                  # evaluation split
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Evaluation failed: {e}")
    # 3. Append & save
    data.append({"model": model_name, "metrics": metrics})
    with open(DB_PATH, "w") as f:
        json.dump(data, f, indent=2)
    return {"status": "ok", "metrics": metrics}

# Serve the React build folder; mounted last so the API routes above take precedence.
app.mount("/", StaticFiles(directory="frontend/build", html=True), name="static")
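
# To run locally (assuming this file is named main.py and the frontend has been
# built into frontend/build):
#   uvicorn main:app --reload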