wolfofbackstreet committed on
Commit 334a0ec · verified · 1 Parent(s): 38e32d9

Update app.py

Files changed (1)
  1. app.py +41 -28
app.py CHANGED
@@ -1,38 +1,22 @@
  import os
  from flask import Flask, request, jsonify, send_file
- from optimum.intel.openvino.modeling_diffusion import OVStableDiffusionPipeline
+ from optimum.intel.openvino.modeling_diffusion import OVStableDiffusionXLPipeline
  from PIL import Image
  import io
  import torch
  import logging
+ from datetime import datetime
+
 
  # Set up logging
  logging.basicConfig(level=logging.INFO)
  logger = logging.getLogger(__name__)
 
  app = Flask(__name__)
-
- # Set cache directories
- os.environ["HF_HOME"] = "/app/cache/huggingface"
- os.environ["MPLCONFIGDIR"] = "/app/matplotlib_cache"
- os.environ["OPENVINO_TELEMETRY_DIR"] = "/app/openvino_cache"
-
- # Ensure cache directories exist
- for cache_dir in ["/app/cache/huggingface", "/app/matplotlib_cache", "/app/openvino_cache"]:
-     os.makedirs(cache_dir, exist_ok=True)
-
- # Load the base pre-converted OpenVINO SDXL model
- base_model_id = "rupeshs/hyper-sd-sdxl-1-step-openvino-int8"
- try:
-     pipeline = OVStableDiffusionPipeline.from_pretrained(
-         base_model_id,
-         ov_config={"CACHE_DIR": "/app/cache/openvino"},
-         device="CPU"
-     )
-     logger.info("Base model loaded successfully")
- except Exception as e:
-     logger.error(f"Failed to load base model: {str(e)}")
-     raise
+ pipeline = OVStableDiffusionXLPipeline.from_pretrained(
+     "rupeshs/hyper-sd-sdxl-1-step-openvino-int8",
+     ov_config={"CACHE_DIR": ""},
+ )
 
  @app.route('/generate', methods=['POST'])
  def generate_image():
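This hunk replaces the guarded, cache-aware model load with a bare module-level OVStableDiffusionXLPipeline call and an empty CACHE_DIR, so OpenVINO no longer persists compiled models between runs. If compile caching and startup logging are still wanted, a minimal sketch along these lines should work with the same class; the cache path is only an example and is not part of this commit:

# Alternative load (not in this commit): keep OpenVINO compile caching and logging
try:
    pipeline = OVStableDiffusionXLPipeline.from_pretrained(
        "rupeshs/hyper-sd-sdxl-1-step-openvino-int8",
        ov_config={"CACHE_DIR": "/app/cache/openvino"},  # example path; must be writable
    )
    logger.info("Model loaded successfully")
except Exception as e:
    logger.error(f"Failed to load model: {str(e)}")
    raise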
@@ -48,10 +32,10 @@ def generate_image():
          # Generate image
          image = pipeline(
              prompt=prompt,
-             width=768,
-             height=768,
-             num_inference_steps=1,
-             guidance_scale=1.0,
+             width=width,
+             height=height,
+             num_inference_steps=num_inference_steps,
+             guidance_scale=guidance_scale
          ).images[0]
 
          # Save image to a bytes buffer
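The hunk above forwards width, height, num_inference_steps, and guidance_scale to the pipeline in place of the previous hard-coded 768/768/1/1.0, but the lines that read those values from the request (new lines 23-31) fall outside the diff context. A plausible sketch of that elided block, where the key names and defaults are assumptions rather than part of the commit:

    # Assumed shape of the elided request-parsing block near the top of generate_image()
    data = request.get_json()
    prompt = data.get('prompt', '')
    width = int(data.get('width', 768))
    height = int(data.get('height', 768))
    num_inference_steps = int(data.get('num_inference_steps', 1))
    guidance_scale = float(data.get('guidance_scale', 1.0))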
@@ -69,6 +53,35 @@ def generate_image():
          logger.error(f"Image generation failed: {str(e)}")
          return jsonify({'error': str(e)}), 500
 
+
+ @app.route('/health', methods=['GET'])
+ def health_check():
+     """
+     Health check endpoint that returns the status of the application
+     """
+     try:
+         # Basic health check response
+         health_status = {
+             'status': 'healthy',
+             'message': 'Application is running',
+             'timestamp': datetime.now().isoformat()
+         }
+
+         # Optional: Add additional checks (e.g., database connectivity)
+         # Example: Check database connection
+         # db_status = check_database_connection()
+         # health_status['database'] = db_status
+
+         return jsonify(health_status), 200
+
+     except Exception as e:
+         error_response = {
+             'status': 'unhealthy',
+             'message': f'Health check failed: {str(e)}',
+             'timestamp': datetime.now().isoformat()
+         }
+         return jsonify(error_response), 500
+
  if __name__ == '__main__':
      port = int(os.getenv('PORT', 7860))
-     app.run(host='0.0.0.0', port=port)
+     app.run(host='localhost', port=port)
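With this revision the server binds to localhost on PORT (default 7860) and exposes the new GET /health route alongside POST /generate. A quick client-side sketch for local testing; the JSON field names mirror the handler parameters above, and it assumes the generate route returns the image bytes via send_file, whose call site is outside the diff:

import requests

BASE = "http://localhost:7860"  # matches the default PORT in app.py

# New health endpoint added by this commit
print(requests.get(f"{BASE}/health", timeout=5).json())

# /generate: field names are assumed to match the handler's variables
resp = requests.post(
    f"{BASE}/generate",
    json={
        "prompt": "a lighthouse at dusk, photorealistic",
        "width": 768,
        "height": 768,
        "num_inference_steps": 1,
        "guidance_scale": 1.0,
    },
    timeout=300,
)
resp.raise_for_status()
with open("out.png", "wb") as f:
    f.write(resp.content)  # assumes the route streams the image back via send_file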