Commit · e8293cd
Parent(s): 2153bff
added more logs

Files changed:
- core/ai_pipeline.py (+39, -16)
- models/model_3d_generator.py (+15, -4)
- test_pipeline_fix.py (+132, -0)
- utils/fallbacks.py (+14, -0)
core/ai_pipeline.py
CHANGED
@@ -90,7 +90,7 @@ class MonsterGenerationPipeline:
                          audio_input: Optional[str] = None,
                          text_input: Optional[str] = None,
                          reference_images: Optional[List] = None,
-                         user_id: str = None) -> Dict[str, Any]:
+                         user_id: Optional[str] = None) -> Dict[str, Any]:
         """Main monster generation pipeline"""

         generation_log = {
@@ -98,51 +98,62 @@ class MonsterGenerationPipeline:
             'timestamp': datetime.now().isoformat(),
             'stages_completed': [],
             'fallbacks_used': [],
-            'success': False
+            'success': False,
+            'errors': []
         }

         try:
+            print("🚀 Starting monster generation pipeline...")
+
             # Stage 1: Speech to Text (if audio provided)
             description = ""
             if audio_input and os.path.exists(audio_input):
                 try:
+                    print("🎤 Processing audio input...")
                     stt_model = self._lazy_load_model('stt')
                     if stt_model:
                         description = stt_model.transcribe(audio_input)
                         generation_log['stages_completed'].append('stt')
+                        print(f"✅ STT completed: {description[:100]}...")
                     else:
                         raise Exception("STT model failed to load")
                 except Exception as e:
-                    print(f"STT failed: {e}")
+                    print(f"❌ STT failed: {e}")
                     description = text_input or "Create a friendly digital monster"
                     generation_log['fallbacks_used'].append('stt')
+                    generation_log['errors'].append(f"STT error: {str(e)}")
                 finally:
                     # Unload STT to free memory
                     self._unload_model('stt')
             else:
                 description = text_input or "Create a friendly digital monster"
+                print(f"📝 Using text input: {description}")

             # Stage 2: Generate monster characteristics
             monster_traits = {}
             monster_dialogue = ""
             try:
+                print("🧠 Generating monster traits and dialogue...")
                 text_gen = self._lazy_load_model('text_gen')
                 if text_gen:
                     monster_traits = text_gen.generate_traits(description)
                     monster_dialogue = text_gen.generate_dialogue(monster_traits)
                     generation_log['stages_completed'].append('text_gen')
+                    print(f"✅ Text generation completed: {monster_traits.get('name', 'Unknown')}")
                 else:
                     raise Exception("Text generation model failed to load")
             except Exception as e:
-                print(f"Text generation failed: {e}")
+                print(f"❌ Text generation failed: {e}")
                 monster_traits, monster_dialogue = self.fallback_manager.handle_text_gen_failure(description)
                 generation_log['fallbacks_used'].append('text_gen')
+                generation_log['errors'].append(f"Text generation error: {str(e)}")
             finally:
                 self._unload_model('text_gen')

             # Stage 3: Generate monster image
             monster_image = None
             try:
+                print("🎨 Generating monster image...")
                 image_gen = self._lazy_load_model('image_gen')
                 if image_gen:
                     # Create enhanced prompt from traits
@@ -154,12 +165,14 @@ class MonsterGenerationPipeline:
                         height=512
                     )
                     generation_log['stages_completed'].append('image_gen')
+                    print("✅ Image generation completed")
                 else:
                     raise Exception("Image generation model failed to load")
             except Exception as e:
-                print(f"Image generation failed: {e}")
+                print(f"❌ Image generation failed: {e}")
                 monster_image = self.fallback_manager.handle_image_gen_failure(description)
                 generation_log['fallbacks_used'].append('image_gen')
+                generation_log['errors'].append(f"Image generation error: {str(e)}")
             finally:
                 self._unload_model('image_gen')

@@ -167,18 +180,21 @@ class MonsterGenerationPipeline:
             model_3d = None
             model_3d_path = None
             try:
+                print("🎲 Converting to 3D model...")
                 model_3d_gen = self._lazy_load_model('3d_gen')
                 if model_3d_gen and monster_image:
                     model_3d = model_3d_gen.image_to_3d(monster_image)
                     # Save 3D model
                     model_3d_path = self._save_3d_model(model_3d, user_id)
                     generation_log['stages_completed'].append('3d_gen')
+                    print("✅ 3D generation completed")
                 else:
-                    raise Exception("3D generation failed")
+                    raise Exception("3D generation failed - no model or image")
             except Exception as e:
-                print(f"3D generation failed: {e}")
+                print(f"❌ 3D generation failed: {e}")
                 model_3d = self.fallback_manager.handle_3d_gen_failure(monster_image)
                 generation_log['fallbacks_used'].append('3d_gen')
+                generation_log['errors'].append(f"3D generation error: {str(e)}")
             finally:
                 self._unload_model('3d_gen')

@@ -186,13 +202,16 @@ class MonsterGenerationPipeline:
             rigged_model = model_3d
             if model_3d and self.config.get('enable_rigging', False):
                 try:
+                    print("🦴 Adding rigging...")
                     rigging_proc = self._lazy_load_model('rigging')
                     if rigging_proc:
                         rigged_model = rigging_proc.rig_mesh(model_3d)
                         generation_log['stages_completed'].append('rigging')
+                        print("✅ Rigging completed")
                 except Exception as e:
-                    print(f"Rigging failed: {e}")
+                    print(f"❌ Rigging failed: {e}")
                     generation_log['fallbacks_used'].append('rigging')
+                    generation_log['errors'].append(f"Rigging error: {str(e)}")
                 finally:
                     self._unload_model('rigging')

@@ -204,6 +223,7 @@ class MonsterGenerationPipeline:
             )

             generation_log['success'] = True
+            print("🎉 Monster generation pipeline completed successfully!")

             return {
                 'description': description,
@@ -217,8 +237,9 @@ class MonsterGenerationPipeline:
             }

         except Exception as e:
+            print(f"💥 Pipeline error: {e}")
             generation_log['error'] = str(e)
-
+            generation_log['errors'].append(f"Pipeline error: {str(e)}")
             return self.fallback_generation(description or "digital monster", generation_log)

     def _create_image_prompt(self, base_description: str, traits: Dict) -> str:
@@ -243,13 +264,14 @@ class MonsterGenerationPipeline:

         return ", ".join(prompt_parts)

-    def _save_3d_model(self, model_3d, user_id: str) -> str:
+    def _save_3d_model(self, model_3d, user_id: Optional[str]) -> Optional[str]:
         """Save 3D model to persistent storage"""
         if not model_3d:
             return None

         timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
-
+        user_id_str = user_id or "anonymous"
+        filename = f"monster_{user_id_str}_{timestamp}.glb"

         # Use HuggingFace Spaces persistent storage
         if os.path.exists("/data"):
@@ -270,30 +292,31 @@ class MonsterGenerationPipeline:

         return filepath

-    def _prepare_download_files(self, model_3d, image, user_id: str) -> List[str]:
+    def _prepare_download_files(self, model_3d, image, user_id: Optional[str]) -> List[str]:
         """Prepare downloadable files for user"""
         files = []
         timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        user_id_str = user_id or "anonymous"

         # Save image
         if image:
             if isinstance(image, Image.Image):
-                image_path = f"/tmp/monster_{
+                image_path = f"/tmp/monster_{user_id_str}_{timestamp}.png"
                 image.save(image_path)
                 files.append(image_path)
             elif isinstance(image, np.ndarray):
-                image_path = f"/tmp/monster_{
+                image_path = f"/tmp/monster_{user_id_str}_{timestamp}.png"
                 Image.fromarray(image).save(image_path)
                 files.append(image_path)

         # Save 3D model in multiple formats if available
         if model_3d:
             # GLB format
-            glb_path = f"/tmp/monster_{
+            glb_path = f"/tmp/monster_{user_id_str}_{timestamp}.glb"
             files.append(glb_path)

             # OBJ format (optional)
-            obj_path = f"/tmp/monster_{
+            obj_path = f"/tmp/monster_{user_id_str}_{timestamp}.obj"
             files.append(obj_path)

         return files
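Note: the new 'errors' list in generation_log accumulates per-stage failure messages alongside 'stages_completed' and 'fallbacks_used'. The sketch below is a hypothetical consumer-side helper, not part of this commit; the function name and the sample result dict are invented for illustration, only the log keys come from the diff.

from typing import Any, Dict

def summarize_generation(result: Dict[str, Any]) -> str:
    """Build a one-line status string from a pipeline result dict."""
    log = result.get("generation_log", {})
    status = "ok" if log.get("success") else "degraded"
    return (
        f"{status}: {len(log.get('stages_completed', []))} stage(s) completed, "
        f"{len(log.get('fallbacks_used', []))} fallback(s) used, "
        f"{len(log.get('errors', []))} error(s) recorded"
    )

# Example with a hand-built result dict:
example = {"generation_log": {"success": False,
                              "stages_completed": ["text_gen"],
                              "fallbacks_used": ["image_gen"],
                              "errors": ["Image generation error: out of memory"]}}
print(summarize_generation(example))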
models/model_3d_generator.py
CHANGED
@@ -100,6 +100,7 @@ class Hunyuan3DGenerator:
         # Safe device movement
         logger.info(f"📦 Moving model to device: {self.device}")
         try:
+            # Check if model is a meta tensor
             if hasattr(self.model, 'to_empty'):
                 # Use to_empty for meta tensors
                 logger.info("📦 Using to_empty() for safe device movement...")
@@ -115,14 +116,24 @@ class Hunyuan3DGenerator:
             if "meta tensor" in str(device_error):
                 logger.info("🔄 Attempting CPU fallback for meta tensor issue...")
                 self.device = "cpu"
-
-
+                try:
+                    if hasattr(self.model, 'to_empty'):
+                        self.model = self.model.to_empty(device="cpu", dtype=torch_dtype)
+                    else:
+                        self.model = self.model.to("cpu", dtype=torch_dtype)
+                    logger.info("✅ Fallback to CPU successful")
+                except Exception as cpu_error:
+                    logger.error(f"❌ CPU fallback also failed: {cpu_error}")
+                    logger.info("🔄 Using fallback mode...")
+                    self.model = "fallback"
             else:
-
+                logger.error(f"❌ Non-meta tensor device error: {device_error}")
+                logger.info("🔄 Using fallback mode...")
+                self.model = "fallback"

         # Enable optimizations safely
         logger.info("📦 Applying model optimizations...")
-        if hasattr(self.model, 'enable_attention_slicing'):
+        if self.model != "fallback" and hasattr(self.model, 'enable_attention_slicing'):
             self.model.enable_attention_slicing()
             logger.info("✅ Attention slicing enabled")
         else:
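Note: the CPU fallback above leans on PyTorch's nn.Module.to_empty(), which re-allocates parameter storage on the target device instead of copying data off the meta device. Below is a minimal, self-contained sketch of that general pattern, independent of Hunyuan3DGenerator; the helper name is illustrative, and to_empty() leaves weights uninitialized until a real state dict is loaded.

import torch
import torch.nn as nn

def move_model_safely(model: nn.Module, device: str) -> nn.Module:
    """Move a module to `device`, falling back to to_empty() for meta tensors."""
    try:
        return model.to(device)
    except (NotImplementedError, RuntimeError) as err:
        if "meta tensor" in str(err).lower():
            # to_empty() allocates uninitialized parameters on the target device;
            # real weights must be loaded afterwards (e.g. via load_state_dict).
            return model.to_empty(device=device)
        raise

if __name__ == "__main__":
    meta_model = nn.Linear(4, 4, device="meta")   # parameters are meta tensors
    cpu_model = move_model_safely(meta_model, "cpu")
    print(next(cpu_model.parameters()).device)    # cpu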
test_pipeline_fix.py
ADDED
@@ -0,0 +1,132 @@
+#!/usr/bin/env python3
+"""
+Test script to verify the pipeline fixes work correctly
+"""
+
+import sys
+import os
+import traceback
+from typing import Dict, Any
+
+# Add the project root to the path
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+
+def test_pipeline_fixes():
+    """Test the pipeline with improved error handling"""
+
+    print("🧪 Testing Monster Generation Pipeline Fixes")
+    print("=" * 50)
+
+    try:
+        # Import the pipeline
+        from core.ai_pipeline import MonsterGenerationPipeline
+        print("✅ Successfully imported MonsterGenerationPipeline")
+
+        # Initialize pipeline
+        print("🔧 Initializing pipeline...")
+        pipeline = MonsterGenerationPipeline(device="cpu")  # Use CPU for testing
+        print("✅ Pipeline initialized successfully")
+
+        # Test with a simple text input
+        print("\n📝 Testing monster generation...")
+        test_input = "Create a friendly fire monster with wings"
+
+        result = pipeline.generate_monster(
+            text_input=test_input,
+            user_id="test_user"
+        )
+
+        print(f"\n📊 Generation Result:")
+        print(f"Status: {result.get('status', 'unknown')}")
+        print(f"Success: {result.get('generation_log', {}).get('success', False)}")
+        print(f"Stages completed: {result.get('generation_log', {}).get('stages_completed', [])}")
+        print(f"Fallbacks used: {result.get('generation_log', {}).get('fallbacks_used', [])}")
+        print(f"Errors: {result.get('generation_log', {}).get('errors', [])}")
+
+        if result.get('traits'):
+            print(f"Monster name: {result.get('traits', {}).get('name', 'Unknown')}")
+            print(f"Monster element: {result.get('traits', {}).get('element', 'Unknown')}")
+
+        if result.get('dialogue'):
+            print(f"Monster dialogue: {result.get('dialogue', '')}")
+
+        print(f"Download files: {result.get('download_files', [])}")
+
+        # Clean up
+        pipeline.cleanup()
+        print("\n🧹 Pipeline cleaned up successfully")
+
+        return True
+
+    except Exception as e:
+        print(f"❌ Test failed with error: {e}")
+        print(f"Error type: {type(e).__name__}")
+        print("Full traceback:")
+        traceback.print_exc()
+        return False
+
+def test_fallback_manager():
+    """Test the fallback manager"""
+
+    print("\n🧪 Testing Fallback Manager")
+    print("=" * 30)
+
+    try:
+        from utils.fallbacks import FallbackManager
+
+        fallback = FallbackManager()
+
+        # Test text generation fallback
+        print("📝 Testing text generation fallback...")
+        traits, dialogue = fallback.handle_text_gen_failure("Create a water monster")
+        print(f"✅ Generated traits: {traits.get('name', 'Unknown')}")
+        print(f"✅ Generated dialogue: {dialogue}")
+
+        # Test image generation fallback
+        print("🎨 Testing image generation fallback...")
+        image = fallback.handle_image_gen_failure("Create a fire monster")
+        print(f"✅ Generated image: {type(image)}")
+
+        # Test 3D generation fallback
+        print("🎲 Testing 3D generation fallback...")
+        model_3d = fallback.handle_3d_gen_failure(image)
+        print(f"✅ Generated 3D model: {type(model_3d)}")
+
+        print("✅ All fallback tests passed!")
+        return True
+
+    except Exception as e:
+        print(f"❌ Fallback test failed: {e}")
+        traceback.print_exc()
+        return False
+
+def main():
+    """Main test function"""
+
+    print("🚀 Starting Pipeline Fix Verification")
+    print("=" * 50)
+
+    # Test fallback manager first (doesn't require heavy models)
+    fallback_success = test_fallback_manager()
+
+    # Test full pipeline (may fail due to missing models, but should show better error handling)
+    pipeline_success = test_pipeline_fixes()
+
+    print("\n" + "=" * 50)
+    print("📊 Test Results Summary:")
+    print(f"Fallback Manager: {'✅ PASSED' if fallback_success else '❌ FAILED'}")
+    print(f"Pipeline: {'✅ PASSED' if pipeline_success else '❌ FAILED'}")
+
+    if fallback_success and pipeline_success:
+        print("\n🎉 All tests passed! Pipeline fixes are working correctly.")
+    elif fallback_success:
+        print("\n⚠️ Fallback manager works, but pipeline may need model dependencies.")
+        print("This is expected if models aren't installed.")
+    else:
+        print("\n❌ Some tests failed. Check the error messages above.")
+
+    return fallback_success and pipeline_success
+
+if __name__ == "__main__":
+    success = main()
+    sys.exit(0 if success else 1)
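Note: main() exits with status 0 when both checks pass and 1 otherwise, so the script can gate automation. A hypothetical invocation sketch (assumes test_pipeline_fix.py sits in the current working directory):

import subprocess
import sys

# Run the verification script and act on its exit code.
proc = subprocess.run([sys.executable, "test_pipeline_fix.py"])
if proc.returncode != 0:
    print("Pipeline fix verification failed; see the output above.")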
utils/fallbacks.py
CHANGED
@@ -148,18 +148,32 @@ class FallbackManager:
     def complete_fallback_generation(self, description: str, generation_log: Dict) -> Dict[str, Any]:
         """Complete fallback generation when entire pipeline fails"""

+        print("🔄 Starting complete fallback generation...")
+
         # Generate all components using fallbacks
+        print("📝 Generating fallback text...")
         traits, dialogue = self.handle_text_gen_failure(description)
+        print(f"✅ Fallback text generated: {traits.get('name', 'Unknown')}")
+
+        print("🎨 Generating fallback image...")
         image = self.handle_image_gen_failure(description)
+        print("✅ Fallback image generated")
+
+        print("🎲 Generating fallback 3D model...")
         model_3d = self.handle_3d_gen_failure(image)
+        print("✅ Fallback 3D model generated")

         # Save fallback results
         timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
         image_path = f"/tmp/fallback_monster_{timestamp}.png"
         model_path = f"/tmp/fallback_monster_{timestamp}.glb"

+        print(f"💾 Saving fallback files...")
         image.save(image_path)
         model_3d.export(model_path)
+        print(f"✅ Fallback files saved: {image_path}, {model_path}")
+
+        print("🎉 Complete fallback generation finished!")

         return {
             'description': description,
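Note: the progress messages added across this commit use bare print() calls, while models/model_3d_generator.py already reports through a module-level logger. The sketch below shows an alternative (not what this commit does): routing the same kind of messages through the standard logging module so they can be filtered by level; the logger name is illustrative.

import logging

logging.basicConfig(level=logging.INFO, format="%(levelname)s %(name)s: %(message)s")
logger = logging.getLogger("monster_pipeline")

logger.info("Starting complete fallback generation...")
try:
    raise RuntimeError("example failure")  # stand-in for a real stage error
except Exception as exc:
    logger.error("Fallback stage failed: %s", exc)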