joko333 commited on
Commit
0858163
·
1 Parent(s): 67bf242

Enhance prediction function with detailed logging and error handling for improved user feedback

Browse files
Files changed (1) hide show
  1. utils/prediction.py +15 -5
utils/prediction.py CHANGED
@@ -73,24 +73,30 @@ def predict_sentence(model, sentence, tokenizer, label_encoder):
73
  """
74
  Make prediction for a single sentence with label validation.
75
  """
 
 
 
76
  # Validation checks
 
77
  if model is None:
78
- print("Error: Model not loaded")
79
  return "Error: Model not loaded", 0.0
80
  if tokenizer is None:
81
- print("Error: Tokenizer not loaded")
82
  return "Error: Tokenizer not loaded", 0.0
83
  if label_encoder is None:
84
- print("Error: Label encoder not loaded")
85
  return "Error: Label encoder not loaded", 0.0
86
 
87
  # Force CPU device
 
88
  device = torch.device('cpu')
89
  model = model.to(device)
90
  model.eval()
91
 
92
  # Tokenize
93
  try:
 
94
  encoding = tokenizer(
95
  sentence,
96
  add_special_tokens=True,
@@ -100,15 +106,19 @@ def predict_sentence(model, sentence, tokenizer, label_encoder):
100
  return_tensors='pt'
101
  ).to(device)
102
 
 
103
  with torch.no_grad():
104
  outputs = model(encoding['input_ids'], encoding['attention_mask'])
105
  probabilities = torch.softmax(outputs, dim=1)
106
  prob, pred_idx = torch.max(probabilities, dim=1)
107
  predicted_label = label_encoder.classes_[pred_idx.item()]
108
- return predicted_label, prob.item()
 
 
 
109
 
110
  except Exception as e:
111
- print(f"Prediction error: {str(e)}")
112
  return f"Error: {str(e)}", 0.0
113
 
114
  def print_labels(label_encoder, show_counts=False):
 
73
  """
74
  Make prediction for a single sentence with label validation.
75
  """
76
+ import time
77
+ start_time = time.time()
78
+
79
  # Validation checks
80
+ st.write("🔄 Starting prediction process...")
81
  if model is None:
82
+ st.error("Error: Model not loaded")
83
  return "Error: Model not loaded", 0.0
84
  if tokenizer is None:
85
+ st.error("Error: Tokenizer not loaded")
86
  return "Error: Tokenizer not loaded", 0.0
87
  if label_encoder is None:
88
+ st.error("Error: Label encoder not loaded")
89
  return "Error: Label encoder not loaded", 0.0
90
 
91
  # Force CPU device
92
+ st.write("⚙️ Preparing model...")
93
  device = torch.device('cpu')
94
  model = model.to(device)
95
  model.eval()
96
 
97
  # Tokenize
98
  try:
99
+ st.write(f"📝 Processing text: {sentence[:50]}...")
100
  encoding = tokenizer(
101
  sentence,
102
  add_special_tokens=True,
 
106
  return_tensors='pt'
107
  ).to(device)
108
 
109
+ st.write("🤖 Running model inference...")
110
  with torch.no_grad():
111
  outputs = model(encoding['input_ids'], encoding['attention_mask'])
112
  probabilities = torch.softmax(outputs, dim=1)
113
  prob, pred_idx = torch.max(probabilities, dim=1)
114
  predicted_label = label_encoder.classes_[pred_idx.item()]
115
+
116
+ elapsed_time = time.time() - start_time
117
+ st.write(f"✅ Prediction completed in {elapsed_time:.2f} seconds")
118
+ return predicted_label, prob.item()
119
 
120
  except Exception as e:
121
+ st.error(f"❌ Prediction error: {str(e)}")
122
  return f"Error: {str(e)}", 0.0
123
 
124
  def print_labels(label_encoder, show_counts=False):