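"""Gradio app for predicting student eligibility from academic scores.

Loads a Keras model (JSON architecture + H5 weights), a fitted scaler, and
feature metadata from local files, then serves a simple prediction UI.
"""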
import gradio as gr
import pandas as pd
import numpy as np
import pickle
import json
import tensorflow as tf
from tensorflow.keras.models import model_from_json
import os
import logging
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Initialize model components
model = None
scaler = None
feature_names = []
model_loaded = False
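# The four artifact files loaded below are expected next to app.py. A typical
# export from the training script (assumed here, not shown in this repo) is:
#   open('model_architecture.json', 'w').write(model.to_json())
#   model.save_weights('final_model.h5')
#   pickle.dump(scaler, open('scaler.pkl', 'wb'))
#   json.dump({'feature_names': feature_names}, open('metadata.json', 'w'))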
def load_model():
    """Load the Keras model, scaler, and feature names from local files."""
    global model, scaler, feature_names, model_loaded
    try:
        # Verify all required files exist before loading anything
        required_files = ['model_architecture.json', 'final_model.h5', 'scaler.pkl', 'metadata.json']
        for file in required_files:
            if not os.path.exists(file):
                raise FileNotFoundError(f"Missing required file: {file}")

        logger.info("Loading model architecture...")
        with open('model_architecture.json', 'r') as json_file:
            model_json = json_file.read()
        model = model_from_json(model_json)

        logger.info("Loading model weights...")
        model.load_weights('final_model.h5')

        logger.info("Loading scaler...")
        with open('scaler.pkl', 'rb') as f:
            scaler = pickle.load(f)

        logger.info("Loading metadata...")
        with open('metadata.json', 'r') as f:
            metadata = json.load(f)
        feature_names = metadata.get('feature_names', ['Score 1', 'Score 2'])  # Default names

        model_loaded = True
        logger.info("βœ… Model loaded successfully!")
        logger.info(f"Features: {feature_names}")
    except Exception as e:
        logger.error(f"❌ Model loading failed: {str(e)}")
        feature_names = ['Score 1', 'Score 2']  # Default names if loading fails
        model_loaded = False
# Load the model at import time so the UI starts immediately; if loading
# fails, the app still launches and predict() reports the error to the user.
load_model()
def predict(*args):
    """Map one value per feature to a (label, probability, confidence) triple."""
    try:
        if not model_loaded:
            raise RuntimeError("Model failed to load. Check server logs for details.")
        if len(args) != len(feature_names):
            raise ValueError(f"Expected {len(feature_names)} features, got {len(args)}")

        # Build the input row, validating that every value is numeric
        # (gr.Number yields None for an empty field, hence TypeError as well)
        input_data = {}
        for i, val in enumerate(args):
            try:
                input_data[feature_names[i]] = float(val)
            except (TypeError, ValueError):
                raise ValueError(f"Invalid value for {feature_names[i]}: {val}")

        # Create a one-row DataFrame in the column order the scaler was fit on
        input_df = pd.DataFrame([input_data], columns=feature_names)

        # Scale features
        scaled_input = scaler.transform(input_df)

        # Predict: the model outputs a single sigmoid probability
        probability = float(model.predict(scaled_input, verbose=0)[0][0])
        prediction = "Eligible" if probability > 0.5 else "Not Eligible"
        # Distance from the 0.5 decision boundary, rescaled to [0, 1]
        confidence = abs(probability - 0.5) * 2
        return prediction, f"{probability:.4f}", f"{confidence:.4f}"
    except Exception as e:
        logger.error(f"Prediction error: {str(e)}")
        return f"Error: {str(e)}", "N/A", "N/A"
# Create Gradio interface
with gr.Blocks(title="Student Eligibility Predictor") as demo:
    gr.Markdown("# πŸŽ“ Student Eligibility Predictor")
    gr.Markdown("Predict student eligibility based on academic performance metrics")

    with gr.Row():
        with gr.Column():
            # Create one numeric input per feature the model expects
            inputs = []
            for feature in feature_names:
                inputs.append(gr.Number(label=feature, value=75))
            predict_btn = gr.Button("Predict", variant="primary")
        with gr.Column():
            prediction_output = gr.Textbox(label="Prediction")
            probability_output = gr.Textbox(label="Probability")
            confidence_output = gr.Textbox(label="Confidence")

    # Build one example row sized to the actual number of inputs, so the
    # example stays valid even if the model uses more than two features
    if len(feature_names) == 2:
        examples = [[75, 80]]
    else:
        examples = [[75] * len(feature_names)]

    gr.Examples(
        examples=examples,
        inputs=inputs,
        outputs=[prediction_output, probability_output, confidence_output],
        fn=predict,
        cache_examples=False  # Run examples live instead of precomputing outputs
    )

    predict_btn.click(
        fn=predict,
        inputs=inputs,
        outputs=[prediction_output, probability_output, confidence_output]
    )
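# Binding to 0.0.0.0 on port 7860 (Gradio's default port) makes the server
# reachable from outside the container, e.g. on Hugging Face Spaces.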
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)