ntam0001 committed
Commit 8b11118 · verified · 1 Parent(s): 11fde79

Update app.py

Files changed (1)
  1. app.py +47 -198
app.py CHANGED
@@ -4,17 +4,23 @@ import numpy as np
 import pickle
 import json
 import tensorflow as tf
-from tensorflow.keras.models import load_model
+from tensorflow.keras.models import load_model, model_from_json
 import plotly.graph_objects as go
-import plotly.express as px
-from plotly.subplots import make_subplots
 import os

+# Set environment variable to avoid oneDNN warnings
+os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'
+
 # Load model artifacts
 def load_model_artifacts():
     try:
-        # Load the trained model
-        model = load_model('final_model.h5')
+        # Load model architecture first
+        with open('model_architecture.json', 'r') as json_file:
+            model_json = json_file.read()
+        model = model_from_json(model_json)
+
+        # Then load weights
+        model.load_weights('best_model.h5')

         # Load the scaler
         with open('scaler.pkl', 'rb') as f:
@@ -40,9 +46,7 @@ except Exception as e:
     feature_names = ['Feature_1', 'Feature_2', 'Feature_3', 'Feature_4']

 def predict_student_eligibility(*args):
-    """
-    Predict student eligibility based on input features
-    """
+    """Predict student eligibility based on input features"""
     try:
         if model is None or scaler is None:
             return "Model not loaded", "N/A", "N/A", create_error_plot()
@@ -89,9 +93,7 @@ def create_error_plot():
     return fig

 def create_prediction_viz(probability, prediction, input_data):
-    """
-    Create visualization for prediction results
-    """
+    """Create visualization for prediction results"""
     try:
         # Create subplots
         fig = make_subplots(
@@ -179,36 +181,8 @@ def create_prediction_viz(probability, prediction, input_data):
     except Exception as e:
         return create_error_plot()

-def create_model_info():
-    """
-    Create model information display
-    """
-    if metadata:
-        info_html = f"""
-        <div style="padding: 20px; background-color: #f0f2f6; border-radius: 10px; margin: 10px 0;">
-            <h3>🤖 Model Information</h3>
-            <ul>
-                <li><strong>Model Type:</strong> {metadata.get('model_type', 'CNN')}</li>
-                <li><strong>Test Accuracy:</strong> {metadata.get('performance_metrics', {}).get('test_accuracy', 'N/A')}</li>
-                <li><strong>AUC Score:</strong> {metadata.get('performance_metrics', {}).get('auc_score', 'N/A')}</li>
-                <li><strong>Creation Date:</strong> {metadata.get('creation_date', 'N/A')}</li>
-                <li><strong>Features:</strong> {len(feature_names)} input features</li>
-            </ul>
-        </div>
-        """
-    else:
-        info_html = """
-        <div style="padding: 20px; background-color: #ffebee; border-radius: 10px; margin: 10px 0;">
-            <h3>⚠️ Model Information</h3>
-            <p>Model artifacts not loaded. Please ensure all required files are uploaded.</p>
-        </div>
-        """
-    return info_html
-
 def batch_predict(file):
-    """
-    Batch prediction from uploaded CSV file
-    """
+    """Batch prediction from uploaded CSV file"""
     try:
         if model is None or scaler is None:
             return "Model not loaded. Please check if all model files are uploaded.", None
@@ -217,7 +191,7 @@ def batch_predict(file):
             return "Please upload a CSV file.", None

         # Read the uploaded file
-        df = pd.read_csv(file.name)
+        df = pd.read_csv(file)

         # Check if all required features are present
         missing_features = set(feature_names) - set(df.columns)
@@ -269,169 +243,44 @@ Results saved to: {output_file}
         return f"Error processing file: {str(e)}", None

 # Create Gradio interface
-with gr.Blocks(
-    theme=gr.themes.Soft(),
-    title="Student Eligibility Prediction",
-    css="""
-    .gradio-container {
-        max-width: 1200px !important;
-    }
-    .main-header {
-        text-align: center;
-        padding: 20px;
-        background: linear-gradient(45deg, #667eea 0%, #764ba2 100%);
-        color: white;
-        border-radius: 10px;
-        margin-bottom: 20px;
-    }
-    .feature-input {
-        margin: 5px 0;
-    }
-    """
-) as demo:
-
-    # Header
-    gr.HTML("""
-    <div class="main-header">
-        <h1>🎓 Student Eligibility Prediction System</h1>
-        <p>AI-powered CNN model for predicting student eligibility with advanced analytics</p>
-    </div>
-    """)
+with gr.Blocks(theme=gr.themes.Soft()) as demo:
+    gr.Markdown("# 🎓 Student Eligibility Prediction")

     with gr.Tabs():
-        # Single Prediction Tab
-        with gr.TabItem("🔮 Single Prediction"):
-            gr.Markdown("### Enter student information to predict eligibility")
-
-            with gr.Row():
-                with gr.Column(scale=1):
-                    gr.Markdown("#### Input Features")
-                    # Create input components dynamically based on features
-                    inputs = []
-                    for i, feature in enumerate(feature_names):
-                        inputs.append(
-                            gr.Number(
-                                label=f"📊 {feature}",
-                                value=75 + i*5,  # Different default values
-                                minimum=0,
-                                maximum=100,
-                                step=0.1,
-                                elem_classes=["feature-input"]
-                            )
-                        )
-
-                    predict_btn = gr.Button(
-                        "🔮 Predict Eligibility",
-                        variant="primary",
-                        size="lg",
-                        elem_id="predict-btn"
-                    )
-
-                with gr.Column(scale=2):
-                    gr.Markdown("#### Prediction Results")
-                    with gr.Row():
-                        prediction_output = gr.Textbox(label="🎯 Prediction", scale=1)
-                        probability_output = gr.Textbox(label="📊 Probability", scale=1)
-                        confidence_output = gr.Textbox(label="🎯 Confidence", scale=1)
-
-                    prediction_plot = gr.Plot(label="📈 Prediction Visualization")
-
-            # Model information
-            gr.HTML(create_model_info())
-
-        # Batch Prediction Tab
-        with gr.TabItem("📊 Batch Prediction"):
-            gr.Markdown("### Upload a CSV file for batch predictions")
-            gr.Markdown(f"**Required columns:** `{', '.join(feature_names)}`")
+        with gr.Tab("Single Prediction"):
+            inputs = []
+            for feature in feature_names:
+                inputs.append(gr.Number(label=feature, value=75))

-            # Sample CSV format
-            gr.Markdown("""
-            **Example CSV format:**
-            ```csv
-            Feature_1,Feature_2,Feature_3,Feature_4
-            85,90,75,88
-            92,78,85,91
-            ```
-            """)
+            predict_btn = gr.Button("Predict")

             with gr.Row():
-                with gr.Column():
-                    file_input = gr.File(
-                        label="📁 Upload CSV File",
-                        file_types=[".csv"],
-                        type="file"
-                    )
-                    batch_predict_btn = gr.Button(
-                        "📊 Process Batch",
-                        variant="primary",
-                        size="lg"
-                    )
-
-                with gr.Column():
-                    batch_output = gr.Textbox(
-                        label="📋 Batch Results Summary",
-                        lines=15,
-                        max_lines=20
-                    )
-                    download_file = gr.File(label="⬇️ Download Results")
-
-        # Model Analytics Tab
-        with gr.TabItem("📈 Model Analytics"):
-            gr.Markdown("### Model Performance Metrics")
+                prediction = gr.Textbox(label="Prediction")
+                probability = gr.Textbox(label="Probability")
+                confidence = gr.Textbox(label="Confidence")

-            if metadata and 'performance_metrics' in metadata:
-                # Performance metrics
-                metrics_data = metadata['performance_metrics']
-                metrics_df = pd.DataFrame([{
-                    'Metric': k.replace('_', ' ').title(),
-                    'Value': f"{v:.4f}" if isinstance(v, float) else str(v)
-                } for k, v in metrics_data.items()])
-
-                gr.Dataframe(
-                    metrics_df,
-                    label="🎯 Performance Metrics",
-                    headers=['Metric', 'Value']
-                )
-            else:
-                gr.Markdown("⚠️ **Performance metrics not available**")
+            plot = gr.Plot()

-            # Feature information
-            gr.Markdown("### 📊 Model Features")
-            feature_info = pd.DataFrame({
-                'Feature Name': feature_names,
-                'Index': range(len(feature_names)),
-                'Type': ['Numerical'] * len(feature_names)
-            })
-            gr.Dataframe(feature_info, label="Feature Information")
+            predict_btn.click(
+                predict_student_eligibility,
+                inputs=inputs,
+                outputs=[prediction, probability, confidence, plot]
+            )
+
+        with gr.Tab("Batch Prediction"):
+            file_input = gr.File(
+                label="Upload CSV",
+                file_types=[".csv"],
+                type="filepath"  # Fixed: Changed from 'file' to 'filepath'
+            )
+            batch_btn = gr.Button("Process Batch")
+            batch_output = gr.Textbox(label="Results")
+            download = gr.File(label="Download")

-            # Model architecture info
-            if metadata:
-                gr.Markdown("### 🏗️ Model Architecture")
-                arch_info = f"""
-                - **Model Type**: {metadata.get('model_type', 'CNN')}
-                - **Input Shape**: {metadata.get('input_shape', 'N/A')}
-                - **Total Features**: {len(feature_names)}
-                - **Output Classes**: {len(metadata.get('target_classes', {}))}
-                """
-                gr.Markdown(arch_info)
-
-    # Event handlers
-    predict_btn.click(
-        fn=predict_student_eligibility,
-        inputs=inputs,
-        outputs=[prediction_output, probability_output, confidence_output, prediction_plot]
-    )
-
-    batch_predict_btn.click(
-        fn=batch_predict,
-        inputs=[file_input],
-        outputs=[batch_output, download_file]
-    )
+            batch_btn.click(
+                batch_predict,
+                inputs=file_input,
+                outputs=[batch_output, download]
+            )

-# Launch the app
-if __name__ == "__main__":
-    demo.launch(
-        share=False,
-        server_name="0.0.0.0",
-        server_port=7860
-    )
+demo.launch()
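For reference, a minimal standalone sketch of the loading pattern this commit switches to: rebuild the network from its JSON architecture, then load the trained weights. The filenames 'model_architecture.json' and 'best_model.h5' are taken from the diff; the helper name load_model_from_artifacts and the __main__ usage are illustrative and not part of app.py.

import os

# The commit sets this variable to silence oneDNN messages; note that it only
# takes effect if it is set before TensorFlow is imported for the first time.
os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'

from tensorflow.keras.models import model_from_json

def load_model_from_artifacts(arch_path='model_architecture.json',
                              weights_path='best_model.h5'):
    """Rebuild the model from its JSON architecture, then load the trained weights."""
    with open(arch_path, 'r') as f:
        model = model_from_json(f.read())  # architecture only, no weights yet
    model.load_weights(weights_path)       # weights produced by the training run
    return model

if __name__ == '__main__':
    model = load_model_from_artifacts()
    model.summary()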
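Similarly, a small sketch of why pd.read_csv(file.name) becomes pd.read_csv(file) in batch_predict: with gr.File(type="filepath") the callback receives the upload as a plain path string rather than a temp-file object, assuming a reasonably recent Gradio release. The component and function names below are illustrative, not taken from app.py.

import gradio as gr
import pandas as pd

def preview_csv(path):
    """Receive the uploaded CSV as a filesystem path and show the first rows."""
    if path is None:
        return "Please upload a CSV file."
    df = pd.read_csv(path)  # a str path, because the component uses type="filepath"
    return df.head().to_string()

with gr.Blocks() as demo:
    file_input = gr.File(label="Upload CSV", file_types=[".csv"], type="filepath")
    preview_btn = gr.Button("Preview")
    output = gr.Textbox(label="Preview", lines=10)
    preview_btn.click(preview_csv, inputs=file_input, outputs=output)

if __name__ == "__main__":
    demo.launch()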