ntam0001 committed (verified)
Commit bffed60 · 1 Parent(s): d3e3b55

Update app.py

Files changed (1):
  app.py (+78 -27)
app.py CHANGED
@@ -6,56 +6,81 @@ import json
 import tensorflow as tf
 from tensorflow.keras.models import model_from_json
 import os
+import logging
+
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)

 # Initialize model components
 model = None
 scaler = None
 metadata = {}
 feature_names = []
+model_loaded = False

 def load_model():
-    global model, scaler, metadata, feature_names
+    global model, scaler, metadata, feature_names, model_loaded

     try:
-        # Load model architecture
+        # Verify all required files exist
+        required_files = ['model_architecture.json', 'final_model.h5', 'scaler.pkl', 'metadata.json']
+        for file in required_files:
+            if not os.path.exists(file):
+                raise FileNotFoundError(f"Missing required file: {file}")
+
+        logger.info("Loading model architecture...")
         with open('model_architecture.json', 'r') as json_file:
             model_json = json_file.read()
         model = model_from_json(model_json)

-        # Load weights
+        logger.info("Loading model weights...")
         model.load_weights('final_model.h5')

-        # Load scaler
+        logger.info("Loading scaler...")
         with open('scaler.pkl', 'rb') as f:
             scaler = pickle.load(f)

-        # Load metadata
+        logger.info("Loading metadata...")
         with open('metadata.json', 'r') as f:
             metadata = json.load(f)
-        feature_names = metadata['feature_names']
+        feature_names = metadata.get('feature_names', [])

-        print("✅ Model loaded successfully!")
-        print(f"Using features: {feature_names}")
+        model_loaded = True
+        logger.info("✅ Model loaded successfully!")
+        logger.info(f"Features: {feature_names}")
+
     except Exception as e:
-        print(f"❌ Error loading model: {str(e)}")
+        logger.error(f"❌ Model loading failed: {str(e)}")
+        model_loaded = False

 # Load model at startup
 load_model()

 def predict(*args):
     try:
-        if model is None or scaler is None:
-            raise Exception("Model not loaded. Please check the model files.")
+        if not model_loaded:
+            raise RuntimeError("Model failed to load. Check server logs for details.")
+
+        if len(args) != len(feature_names):
+            raise ValueError(f"Expected {len(feature_names)} features, got {len(args)}")

         # Create input dictionary
-        input_data = {feature_names[i]: float(args[i]) for i in range(len(feature_names))}
-        input_df = pd.DataFrame([input_data])
+        input_data = {}
+        for i, val in enumerate(args):
+            try:
+                input_data[feature_names[i]] = float(val)
+            except ValueError:
+                raise ValueError(f"Invalid value for {feature_names[i]}: {val}")
+
+        # Create DataFrame
+        input_df = pd.DataFrame([input_data], columns=feature_names)

         # Scale features
         scaled_input = scaler.transform(input_df)

         # Predict
-        probability = float(model.predict(scaled_input)[0][0])
+        probability = float(model.predict(scaled_input, verbose=0)[0][0])
         prediction = "Eligible" if probability > 0.5 else "Not Eligible"
         confidence = abs(probability - 0.5) * 2

@@ -64,22 +89,48 @@ def predict(*args):
             "Probability": f"{probability:.4f}",
             "Confidence": f"{confidence:.4f}"
         }
+
     except Exception as e:
+        logger.error(f"Prediction error: {str(e)}")
         return {"Error": str(e)}

 # Create Gradio interface
-iface = gr.Interface(
-    fn=predict,
-    inputs=[gr.Number(label=name) for name in feature_names],
-    outputs=[
-        gr.Textbox(label="Prediction"),
-        gr.Textbox(label="Probability"),
-        gr.Textbox(label="Confidence")
-    ],
-    title="🎓 Student Eligibility Predictor",
-    description="Predict student eligibility based on academic performance metrics",
-    examples=[[75, 80, 85] if len(feature_names) >= 3 else [75, 80]]  # Example inputs
-)
+with gr.Blocks(title="Student Eligibility Predictor") as demo:
+    gr.Markdown("# 🎓 Student Eligibility Predictor")
+    gr.Markdown("Predict student eligibility based on academic performance metrics")
+
+    with gr.Row():
+        with gr.Column():
+            input_components = [gr.Number(label=name) for name in feature_names]
+            predict_btn = gr.Button("Predict", variant="primary")
+        with gr.Column():
+            prediction_output = gr.Textbox(label="Prediction")
+            probability_output = gr.Textbox(label="Probability")
+            confidence_output = gr.Textbox(label="Confidence")
+
+    # Add examples if features exist
+    if len(feature_names) > 0:
+        examples = []
+        if len(feature_names) >= 3:
+            examples.append([75, 80, 85] + [0]*(len(feature_names)-3))
+        elif len(feature_names) == 2:
+            examples.append([75, 80])
+        else:
+            examples.append([75])
+
+        gr.Examples(
+            examples=examples,
+            inputs=input_components,
+            outputs=[prediction_output, probability_output, confidence_output],
+            fn=predict,
+            cache_examples=False
+        )
+
+    predict_btn.click(
+        fn=predict,
+        inputs=input_components,
+        outputs=[prediction_output, probability_output, confidence_output]
+    )

 if __name__ == "__main__":
-    iface.launch()
+    demo.launch(server_name="0.0.0.0", server_port=7860)
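
A quick way to sanity-check the refactored load_model()/predict() path without launching the UI is to import app.py and call predict() directly: load_model() runs at import time, and demo.launch() stays behind the __main__ guard. The sketch below is illustrative only and is not part of this commit; it assumes the four artifacts (model_architecture.json, final_model.h5, scaler.pkl, metadata.json) sit next to app.py, and the placeholder value 75.0 stands in for real values of the features listed under feature_names in metadata.json.

# smoke_test.py -- illustrative sketch, not part of this commit.
# Assumes app.py and its four model artifacts are in the working directory.
import app

if not app.model_loaded:
    raise SystemExit("Model did not load; check the log output from load_model().")

# One placeholder value per feature name read from metadata.json.
sample = [75.0] * len(app.feature_names)
result = app.predict(*sample)
print(result)  # dict with the prediction fields, or {"Error": ...} on failure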