devcool20 committed on
Commit 1d0912a · verified · 1 Parent(s): 7a1d86c

Update app.py

Files changed (1)
  1. app.py +3 -2
app.py CHANGED
@@ -59,7 +59,8 @@ print("Debug Point: Attempting to instantiate sales.Agent (core RL model).")
 if sales is not None:
     try:
         # --- Relying on Dockerfile to make /.deepmost writable ---
-        # NO local_model_path argument here.
+        # NO local_model_path argument here. deepmost expects to write to /.deepmost by default
+        # and doesn't take local_model_path in this constructor.
         sales_agent = sales.Agent(
             model_path="https://huggingface.co/DeepMostInnovations/sales-conversion-model-reinf-learning/resolve/main/sales_conversion_model.zip",
             auto_download=True,
@@ -214,7 +215,7 @@ def get_llm_advice():
             print(f"ERROR: JSON parsing error for overall advice: {json_e}. Raw string: {raw_json_string}")
             return jsonify({"points": ["Error parsing LLM JSON advice. This happens with incomplete LLM responses (e.g., due to API rate limits or max tokens). Please try a shorter conversation or wait a moment. Raw response starts with: " + raw_json_string[:100] + "..."]})
         except Exception as parse_e:
-            print(f"ERROR: General error parsing LLM JSON advice: {parse_e}. Raw string: {raw_json_string}")
+            print(f"ERROR: General error during JSON parsing attempt for chat_llm (Gemini): {parse_e}. Raw string: {json_response}")
             return jsonify({"points": ["General error with LLM JSON parsing. Raw response starts with: " + raw_json_string[:100] + "..."]})

     except google.api_core.exceptions.ResourceExhausted as quota_e:
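
For context on the first hunk, here is a minimal standalone sketch (not part of the commit) of how that instantiation is typically guarded. The sales.Agent call, its model_path URL, and auto_download=True come straight from the diff; the guarded import, the fallback to None, and the except handler name are assumptions about the rest of app.py, not confirmed code.

try:
    from deepmost import sales  # assumed import path; the diff only shows the `sales` name
except ImportError:
    sales = None  # assumed: lets the `if sales is not None` check below make sense

sales_agent = None  # assumed fallback so later code can test for a missing agent

if sales is not None:
    try:
        # Per the commit's comment: deepmost downloads the model into /.deepmost
        # by default, so the Dockerfile must make that path writable; no
        # local_model_path argument is passed to this constructor.
        sales_agent = sales.Agent(
            model_path="https://huggingface.co/DeepMostInnovations/sales-conversion-model-reinf-learning/resolve/main/sales_conversion_model.zip",
            auto_download=True,
        )
    except Exception as agent_e:  # hypothetical handler, not from the diff
        print(f"ERROR: Could not instantiate sales.Agent: {agent_e}")
        sales_agent = None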
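Likewise, the second hunk only shows the tail of the JSON-parsing fallback. Below is a hedged sketch of how such a parse-and-fallback block can be structured around a Gemini text reply, assuming a Flask request context and that the reply has already been read into raw_json_string. The helper name parse_llm_points, the json.loads call, and the json.JSONDecodeError branch are illustrative assumptions; only the error messages and jsonify payloads are taken from the diff. The catch-all here logs raw_json_string rather than the json_response variable referenced in the committed line, since that variable is not defined in this standalone sketch.

import json
from flask import jsonify

def parse_llm_points(raw_json_string):
    # Hypothetical helper (not from app.py): turn a raw Gemini JSON string
    # into the {"points": [...]} payload, with the same fallbacks as the diff.
    try:
        parsed = json.loads(raw_json_string)
        return jsonify({"points": parsed.get("points", [])})
    except json.JSONDecodeError as json_e:
        # First fallback in the diff: truncated or otherwise invalid JSON.
        print(f"ERROR: JSON parsing error for overall advice: {json_e}. Raw string: {raw_json_string}")
        return jsonify({"points": ["Error parsing LLM JSON advice. This happens with incomplete LLM responses (e.g., due to API rate limits or max tokens). Please try a shorter conversation or wait a moment. Raw response starts with: " + raw_json_string[:100] + "..."]})
    except Exception as parse_e:
        # Catch-all fallback that the commit reworded.
        print(f"ERROR: General error during JSON parsing attempt for chat_llm (Gemini): {parse_e}. Raw string: {raw_json_string}")
        return jsonify({"points": ["General error with LLM JSON parsing. Raw response starts with: " + raw_json_string[:100] + "..."]})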