Spaces:
Sleeping
Sleeping
File size: 4,461 Bytes
444ee18 f978017 444ee18 bffed60 444ee18 2b0cb4e bffed60 8b11118 2b0cb4e dc83180 2b0cb4e 444ee18 bffed60 8b11118 5529e78 bffed60 a915b02 5529e78 bffed60 444ee18 5529e78 bffed60 444ee18 dc83180 2b0cb4e bffed60 444ee18 bffed60 dc83180 bffed60 444ee18 2b0cb4e 444ee18 2b0cb4e 444ee18 bffed60 2b0cb4e 5529e78 bffed60 374abdb 5529e78 2b0cb4e 5529e78 2b0cb4e bffed60 444ee18 ed024cc 374abdb dc83180 bffed60 444ee18 bffed60 dc83180 2b0cb4e bffed60 dc83180 bffed60 dc83180 bffed60 dc83180 bffed60 dc83180 bffed60 f978017 374abdb bffed60 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 |
import gradio as gr
import pandas as pd
import numpy as np
import pickle
import json
import tensorflow as tf
from tensorflow.keras.models import model_from_json
import os
import logging
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Initialize model components
# These module-level globals are populated by load_model() at startup and
# read by predict() and the Gradio UI construction below.
model = None          # Keras model, set by load_model()
scaler = None         # fitted sklearn-style scaler loaded from scaler.pkl
feature_names = []    # input feature labels from metadata.json
model_loaded = False  # True only after every artifact loads successfully
def load_model():
    """Load the Keras model, scaler, and feature metadata from disk.

    Populates the module-level globals ``model``, ``scaler``,
    ``feature_names`` and ``model_loaded``. On any failure the error is
    logged with its traceback, ``model_loaded`` is set False, and default
    feature names are installed so the UI can still be constructed.
    """
    global model, scaler, feature_names, model_loaded
    try:
        # Fail fast with a clear message if any artifact is missing.
        required_files = ['model_architecture.json', 'final_model.h5',
                          'scaler.pkl', 'metadata.json']
        for file in required_files:
            if not os.path.exists(file):
                raise FileNotFoundError(f"Missing required file: {file}")

        logger.info("Loading model architecture...")
        with open('model_architecture.json', 'r') as json_file:
            model_json = json_file.read()
        model = model_from_json(model_json)

        logger.info("Loading model weights...")
        model.load_weights('final_model.h5')

        logger.info("Loading scaler...")
        # NOTE(review): pickle.load is only safe on trusted local artifacts.
        with open('scaler.pkl', 'rb') as f:
            scaler = pickle.load(f)

        logger.info("Loading metadata...")
        with open('metadata.json', 'r') as f:
            metadata = json.load(f)
        feature_names = metadata.get('feature_names', ['Score 1', 'Score 2'])  # Default names

        model_loaded = True
        # Original message started with a mojibake character and was split
        # across two source lines (invalid syntax); repaired here.
        logger.info("Model loaded successfully!")
        logger.info(f"Features: {feature_names}")
    except Exception:
        # Startup boundary: log the full traceback and keep the app
        # serving with the model disabled rather than crashing.
        logger.exception("Model loading failed")
        feature_names = ['Score 1', 'Score 2']  # Default names if loading fails
        model_loaded = False
# Load model at startup
# Runs at import time so feature_names is populated before the Gradio
# interface below builds one input component per feature.
load_model()
def predict(*args):
    """Predict student eligibility from one numeric value per feature.

    Args:
        *args: One value per entry in ``feature_names``, in the same order
            as the Gradio input components.

    Returns:
        Tuple of (prediction label, probability string, confidence string).
        On any failure returns (error message, "N/A", "N/A") instead of
        raising, so the Gradio UI displays the problem to the user.
    """
    try:
        if not model_loaded:
            raise RuntimeError("Model failed to load. Check server logs for details.")
        if len(args) != len(feature_names):
            raise ValueError(f"Expected {len(feature_names)} features, got {len(args)}")

        # Build a {feature: float} mapping, validating each input.
        input_data = {}
        for i, val in enumerate(args):
            try:
                # TypeError covers None — an empty Gradio Number field —
                # which the original ValueError-only handler missed.
                input_data[feature_names[i]] = float(val)
            except (TypeError, ValueError):
                raise ValueError(f"Invalid value for {feature_names[i]}: {val}")

        # Create DataFrame in the training column order, then scale.
        input_df = pd.DataFrame([input_data], columns=feature_names)
        scaled_input = scaler.transform(input_df)

        # Single-sample, single-output sigmoid prediction.
        probability = float(model.predict(scaled_input, verbose=0)[0][0])
        prediction = "Eligible" if probability > 0.5 else "Not Eligible"
        # Confidence: distance from the 0.5 decision boundary, rescaled to [0, 1].
        confidence = abs(probability - 0.5) * 2
        return prediction, f"{probability:.4f}", f"{confidence:.4f}"
    except Exception as e:
        logger.error(f"Prediction error: {str(e)}")
        return f"Error: {str(e)}", "N/A", "N/A"
# Create Gradio interface
with gr.Blocks(title="Student Eligibility Predictor") as demo:
    # Heading contained a mojibake character in the original; repaired.
    gr.Markdown("# 🎓 Student Eligibility Predictor")
    gr.Markdown("Predict student eligibility based on academic performance metrics")

    with gr.Row():
        with gr.Column():
            # One numeric input per model feature (feature_names was
            # populated by load_model() at import time).
            inputs = []
            for feature in feature_names:
                inputs.append(gr.Number(label=feature, value=75))
            predict_btn = gr.Button("Predict", variant="primary")
        with gr.Column():
            prediction_output = gr.Textbox(label="Prediction")
            probability_output = gr.Textbox(label="Probability")
            confidence_output = gr.Textbox(label="Confidence")

    # Each example row must have exactly one value per input component.
    # The original emitted two values whenever there were >= 2 features,
    # which breaks gr.Examples for models with 3+ features.
    if len(feature_names) == 2:
        examples = [[75, 80]]  # Basic example with two features
    else:
        examples = [[75] * max(len(feature_names), 1)]

    gr.Examples(
        examples=examples,
        inputs=inputs,
        outputs=[prediction_output, probability_output, confidence_output],
        fn=predict,
        cache_examples=False
    )

    predict_btn.click(
        fn=predict,
        inputs=inputs,
        outputs=[prediction_output, probability_output, confidence_output]
    )
if __name__ == "__main__":
    # Bind to all interfaces on the standard Gradio port (container-friendly).
    # A stray trailing " |" scrape artifact was removed from this line.
    demo.launch(server_name="0.0.0.0", server_port=7860)