Abhiroopvanaone committed on
Commit
feccca5
·
verified ·
1 Parent(s): bef1633

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +150 -150
app.py CHANGED
@@ -1,152 +1,152 @@
1
- import spaces
2
- import gradio as gr
3
- import torch
4
- from transformers import pipeline
5
- from PIL import Image
6
-
7
- # Global model storage
8
- models = {}
9
-
10
- @spaces.GPU(duration=120)
11
- def generate_code(image, model_choice, prompt_style):
12
- """Generate CADQuery code - single GPU function."""
13
- if image is None:
14
- return "❌ Please upload an image first."
15
-
16
- # Create prompts
17
- prompts = {
18
- "Simple": "Generate CADQuery Python code for this 3D model:",
19
- "Detailed": "Analyze this 3D CAD model and generate Python CADQuery
20
- code.\n\nRequirements:\n- Import cadquery as cq\n- Store result in 'result' variable\n- Use
21
- proper CADQuery syntax\n\nCode:",
22
- "Chain-of-Thought": "Analyze this 3D CAD model step by step:\n\nStep 1: Identify the
23
- basic geometry\nStep 2: Note any features\nStep 3: Generate clean CADQuery Python
24
- code\n\n```python\nimport cadquery as cq\n\n# Generated code:"
25
- }
26
-
27
- try:
28
- # Model mapping
29
- model_map = {
30
- "GLM-4.5V-AWQ": "QuantTrio/GLM-4.5V-AWQ",
31
- "GLM-4.5V-FP8": "zai-org/GLM-4.5V-FP8",
32
- "GLM-4.5V": "zai-org/GLM-4.5V"
33
- }
34
-
35
- model_name = model_map[model_choice]
36
-
37
- # Load or get cached model
38
- if model_name not in models:
39
- pipe = pipeline(
40
- "image-text-to-text",
41
- model=model_name,
42
- device_map="auto",
43
- torch_dtype=torch.float16,
44
- trust_remote_code=True
45
- )
46
- models[model_name] = pipe
47
- else:
48
- pipe = models[model_name]
49
-
50
- # Generate
51
- messages = [{
52
- "role": "user",
53
- "content": [
54
- {"type": "image", "image": image},
55
- {"type": "text", "text": prompts[prompt_style]}
56
- ]
57
- }]
58
-
59
- result = pipe(messages, max_new_tokens=512, temperature=0.7)
60
-
61
- if isinstance(result, list) and len(result) > 0:
62
- generated_text = result[0].get("generated_text", str(result))
63
- else:
64
- generated_text = str(result)
65
-
66
- # Extract code
67
- code = generated_text.strip()
68
- if "```python" in code:
69
- start = code.find("```python") + 9
70
- end = code.find("```", start)
71
- if end > start:
72
- code = code[start:end].strip()
73
-
74
- if "import cadquery" not in code:
75
- code = "import cadquery as cq\n\n" + code
76
-
77
- return f"""## 🎯 Generated CADQuery Code
78
-
79
- ```python
80
- {code}
81
-
82
- πŸ“Š Info
83
-
84
- - Model: {model_choice}
85
- - Prompt: {prompt_style}
86
- - Device: GPU
87
-
88
- πŸ”§ Usage
89
-
90
- pip install cadquery
91
- python your_script.py
92
- """
93
-
94
- except Exception as e:
95
- return f"❌ **Generation Failed**: {str(e)[:500]}"
96
- def system_info():
97
- """Get system info - no GPU needed."""
98
- info = f"""## πŸ–₯️ System Information
99
-
100
- - CUDA Available: {torch.cuda.is_available()}
101
- - CUDA Devices: {torch.cuda.device_count() if torch.cuda.is_available() else 0}
102
- - PyTorch Version: {torch.version}
103
- - Device: {"GPU" if torch.cuda.is_available() else "CPU"}
104
- """
105
  return info
106
 
107
- Create interface
108
-
109
- with gr.Blocks(title="GLM-4.5V CAD Generator", theme=gr.themes.Soft()) as demo:
110
- gr.Markdown("""
111
- # πŸ”§ GLM-4.5V CAD Generator
112
-
113
- Generate CADQuery Python code from 3D CAD model images using GLM-4.5V models!
114
-
115
- **Models**: GLM-4.5V-AWQ (fastest) | GLM-4.5V-FP8 (balanced) | GLM-4.5V (best quality)
116
- """)
117
-
118
- with gr.Tab("πŸš€ Generate"):
119
- with gr.Row():
120
- with gr.Column():
121
- image_input = gr.Image(type="pil", label="Upload CAD Model Image")
122
- model_choice = gr.Dropdown(
123
- choices=["GLM-4.5V-AWQ", "GLM-4.5V-FP8", "GLM-4.5V"],
124
- value="GLM-4.5V-AWQ",
125
- label="Select Model"
126
- )
127
- prompt_style = gr.Dropdown(
128
- choices=["Simple", "Detailed", "Chain-of-Thought"],
129
- value="Chain-of-Thought",
130
- label="Prompt Style"
131
- )
132
- generate_btn = gr.Button("πŸš€ Generate CADQuery Code", variant="primary")
133
-
134
- with gr.Column():
135
- output = gr.Markdown("Upload an image and click Generate!")
136
-
137
- generate_btn.click(
138
- fn=generate_code,
139
- inputs=[image_input, model_choice, prompt_style],
140
- outputs=output
141
- )
142
-
143
- with gr.Tab("βš™οΈ System"):
144
- info_display = gr.Markdown()
145
- refresh_btn = gr.Button("πŸ”„ Refresh")
146
-
147
- demo.load(fn=system_info, outputs=info_display)
148
- refresh_btn.click(fn=system_info, outputs=info_display)
149
- if name == "main":
150
- print("πŸš€ Starting GLM-4.5V CAD Generator...")
151
- print(f"CUDA available: {torch.cuda.is_available()}")
152
- demo.launch()
 
 
1
+ import spaces
2
+ import gradio as gr
3
+ import torch
4
+ from transformers import pipeline
5
+ from PIL import Image
6
+ import traceback
7
+
8
+ # Global model storage
9
+ models = {}
10
+
11
def _extract_code(generated):
    """Normalize a pipeline response into a runnable CADQuery snippet.

    The image-text-to-text pipeline may return either a plain string or a
    chat-style list of {"role", "content"} messages; in the latter case the
    final message is assumed to carry the assistant reply — TODO confirm
    against the installed transformers version.
    """
    if isinstance(generated, list):
        last = generated[-1] if generated else ""
        generated = last.get("content", str(last)) if isinstance(last, dict) else str(last)

    code = str(generated).strip()

    # Prefer the contents of a ```python fenced block when the model emits one.
    if "```python" in code:
        start = code.find("```python") + len("```python")
        end = code.find("```", start)
        if end > start:
            code = code[start:end].strip()

    # Make sure the snippet can run stand-alone.
    if "import cadquery" not in code:
        code = "import cadquery as cq\n\n" + code
    return code


@spaces.GPU(duration=120)
def generate_code(image, model_choice, prompt_style):
    """Generate CADQuery code - single GPU function.

    Args:
        image: PIL image from the Gradio upload widget, or None when nothing
            was uploaded.
        model_choice: Dropdown label; one of "GLM-4.5V-AWQ", "GLM-4.5V-FP8",
            "GLM-4.5V".
        prompt_style: Dropdown label; one of "Simple", "Detailed",
            "Chain-of-Thought".

    Returns:
        Markdown with the generated code and usage instructions, or a
        "❌ ..."-prefixed error message (the UI renders either directly).
    """
    if image is None:
        return "❌ Please upload an image first."

    # Create prompts (runtime strings — kept verbatim).
    prompts = {
        "Simple": "Generate CADQuery Python code for this 3D model:",
        "Detailed": "Analyze this 3D CAD model and generate Python CADQuery code.\n\nRequirements:\n- Import cadquery as cq\n- Store result in 'result' variable\n- Use proper CADQuery syntax\n\nCode:",
        "Chain-of-Thought": "Analyze this 3D CAD model step by step:\n\nStep 1: Identify the basic geometry\nStep 2: Note any features\nStep 3: Generate clean CADQuery Python code\n\n```python\nimport cadquery as cq\n\n# Generated code:"
    }

    # Model mapping: dropdown label -> Hugging Face repo id.
    model_map = {
        "GLM-4.5V-AWQ": "QuantTrio/GLM-4.5V-AWQ",
        "GLM-4.5V-FP8": "zai-org/GLM-4.5V-FP8",
        "GLM-4.5V": "zai-org/GLM-4.5V"
    }

    # Fail with a readable message instead of a caught KeyError whose text
    # ("'...'") gives the user no hint about what went wrong.
    if model_choice not in model_map:
        return f"❌ **Generation Failed**: unknown model '{model_choice}'"
    if prompt_style not in prompts:
        return f"❌ **Generation Failed**: unknown prompt style '{prompt_style}'"

    try:
        model_name = model_map[model_choice]

        # Load on first use, then reuse the pipeline cached in the
        # module-level `models` dict (model loading dominates latency).
        pipe = models.get(model_name)
        if pipe is None:
            pipe = pipeline(
                "image-text-to-text",
                model=model_name,
                device_map="auto",
                torch_dtype=torch.float16,
                trust_remote_code=True
            )
            models[model_name] = pipe

        # Generate
        messages = [{
            "role": "user",
            "content": [
                {"type": "image", "image": image},
                {"type": "text", "text": prompts[prompt_style]}
            ]
        }]

        # BUG FIX: temperature only takes effect with sampling enabled;
        # without do_sample=True transformers decodes greedily and warns
        # that temperature is ignored.
        result = pipe(messages, max_new_tokens=512, temperature=0.7, do_sample=True)

        if isinstance(result, list) and len(result) > 0:
            generated_text = result[0].get("generated_text", str(result))
        else:
            generated_text = str(result)

        code = _extract_code(generated_text)

        return f"""## 🎯 Generated CADQuery Code

```python
{code}
```

## πŸ“Š Info
- **Model**: {model_choice}
- **Prompt**: {prompt_style}
- **Device**: GPU

## πŸ”§ Usage
```bash
pip install cadquery
python your_script.py
```
"""

    except Exception as e:
        # Surface a truncated error to the UI rather than crashing the Space.
        return f"❌ **Generation Failed**: {str(e)[:500]}"
94
+
95
def system_info():
    """Report CUDA availability, device count, and PyTorch version as Markdown.

    Pure CPU-side introspection — deliberately not decorated with
    @spaces.GPU, so it can run on the free tier / before a GPU is attached.
    """
    has_cuda = torch.cuda.is_available()
    device_count = torch.cuda.device_count() if has_cuda else 0
    report = [
        "## πŸ–₯️ System Information",
        "",
        f"- **CUDA Available**: {has_cuda}",
        f"- **CUDA Devices**: {device_count}",
        f"- **PyTorch Version**: {torch.__version__}",
        f"- **Device**: {'GPU' if has_cuda else 'CPU'}",
        "",
    ]
    return "\n".join(report)
105
 
106
# ---- Gradio interface ------------------------------------------------------
# `demo` must stay module-level: the __main__ guard launches it and HF Spaces
# imports it by name.
with gr.Blocks(title="GLM-4.5V CAD Generator", theme=gr.themes.Soft()) as demo:
    gr.Markdown("""
# πŸ”§ GLM-4.5V CAD Generator

Generate CADQuery Python code from 3D CAD model images using GLM-4.5V models!

**Models**: GLM-4.5V-AWQ (fastest) | GLM-4.5V-FP8 (balanced) | GLM-4.5V (best quality)
""")

    with gr.Tab("πŸš€ Generate"):
        with gr.Row():
            with gr.Column():
                # Left column: user inputs.
                cad_image = gr.Image(type="pil", label="Upload CAD Model Image")
                model_dropdown = gr.Dropdown(
                    choices=["GLM-4.5V-AWQ", "GLM-4.5V-FP8", "GLM-4.5V"],
                    value="GLM-4.5V-AWQ",
                    label="Select Model",
                )
                style_dropdown = gr.Dropdown(
                    choices=["Simple", "Detailed", "Chain-of-Thought"],
                    value="Chain-of-Thought",
                    label="Prompt Style",
                )
                run_button = gr.Button("πŸš€ Generate CADQuery Code", variant="primary")

            with gr.Column():
                # Right column: generated code rendered as Markdown.
                result_box = gr.Markdown("Upload an image and click Generate!")

        run_button.click(
            fn=generate_code,
            inputs=[cad_image, model_dropdown, style_dropdown],
            outputs=result_box,
        )

    with gr.Tab("βš™οΈ System"):
        sys_box = gr.Markdown()
        reload_button = gr.Button("πŸ”„ Refresh")

        # Populate on page load and again on demand.
        demo.load(fn=system_info, outputs=sys_box)
        reload_button.click(fn=system_info, outputs=sys_box)
147
+
148
if __name__ == "__main__":
    # Entry point for running locally; HF Spaces imports this module instead.
    cuda_ok = torch.cuda.is_available()
    print("πŸš€ Starting GLM-4.5V CAD Generator...")
    print(f"CUDA available: {cuda_ok}")
    # share=True is unsupported on HF Spaces, so launch with defaults.
    demo.launch()