from fastapi import FastAPI, HTTPException, Header
from pydantic import BaseModel
import numpy as np
from tensorflow.keras.models import load_model
from fastapi.middleware.cors import CORSMiddleware
import torch
import torch.nn.functional as F
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Load the Keras model for stress-level prediction
model = load_model("model_stresss.h5")
labels = ['Tidak Stress', 'Sedikit Stress', 'Normal', 'Stress', 'Sangat Stress']

# Load the Hugging Face IndoBERT emotion model
cache_path = "/tmp/huggingface"

model_dir = "Chipan/indobert-emotion"
tokenizer = AutoTokenizer.from_pretrained(model_dir, cache_dir=cache_path)
model_bert = AutoModelForSequenceClassification.from_pretrained(model_dir, cache_dir=cache_path)
model_bert.eval()

# Index-to-label mapping for the emotion classes
label_map = {0: "Bersyukur", 1: "Marah", 2: "Sedih", 3: "Senang", 4: "Stress"}

# Set up the FastAPI app with permissive CORS
app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Data schemas
class CheckInData(BaseModel):
    mood: float
    sleep: float
    anxiety: float
    exercise: float
    support: float

class TextInput(BaseModel):
    text: str

@app.post("/predict")
def predict(data: CheckInData, authorization: str = Header(None)):
    try:
        raw = np.array([[data.mood, data.sleep, data.anxiety, data.exercise, data.support]])
        prediction = model.predict(raw)
        idx = int(np.argmax(prediction))
        return {
            "predicted_index": idx,
            "predicted_label": labels[idx],
            "raw_prediction": prediction.tolist()
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Prediction error: {str(e)}")

@app.post("/analyze")
def analyze_emotion(input: TextInput):
    try:
        inputs = tokenizer(input.text, return_tensors="pt", padding=True, truncation=True, max_length=128)
        with torch.no_grad():
            logits = model_bert(**inputs).logits
            probs = F.softmax(logits, dim=1)
            idx = int(torch.argmax(probs))
        return {
            "emotion": label_map.get(idx, "unknown"),
            "confidence": round(probs[0, idx].item(), 4)
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Emotion analysis error: {str(e)}")

@app.get("/")
def root():
    return {"status": "ok"}