Update app.py
app.py
CHANGED
@@ -4,9 +4,8 @@ import base64
 import streamlit.components.v1 as components
 from transformers import pipeline
 from gtts import gTTS
-import os
-import qrcode
 from io import BytesIO
+import qrcode

 # Page config
 st.set_page_config(page_title="AR/VR Code Visualizer", layout="wide")
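The Space only installs what its requirements.txt declares, so the qrcode package imported above has to be listed there along with the other dependencies. A minimal sketch, assuming the default pip-based Spaces setup; the file is not part of this commit and the entries are illustrative:

    # requirements.txt (assumed; not shown in this commit)
    streamlit
    transformers
    torch          # backend assumed for the summarization pipeline
    gtts
    qrcode[pil]    # [pil] extra so qrcode can render PNG images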
@@ -49,19 +48,19 @@ if code.strip():
     for fn, callees in call_graph.items():
         st.write(f"🔹 `{fn}` calls: {', '.join(callees) if callees else 'None'}")

-    #
+    # AI Summary
     prompt = f"Explain the structure and purpose of the following functions and how they call each other: {call_graph}"
     summary = summarizer(prompt, max_length=60, min_length=15, do_sample=False)
     summary_text = summary[0]['summary_text']
     st.success(summary_text)

-    # Voice narration
+    # Voice narration
     st.subheader("🔊 Voice Narration")
     tts = gTTS(text=summary_text)
     tts.save("summary.mp3")
     st.audio("summary.mp3", format="audio/mp3")

-    #
+    # A-Frame scene generation
     def generate_aframe(call_graph):
         function_data = {
             "functions": [],
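The hunk above calls summarizer and iterates over call_graph, both of which are defined earlier in app.py and fall outside this diff. A minimal sketch of what those definitions might look like, assuming the call graph is extracted with the standard-library ast module and the summarizer is a plain transformers summarization pipeline; the helper names and the model choice below are assumptions, not taken from this commit:

    import ast
    import streamlit as st
    from transformers import pipeline

    # Hypothetical helper: maps each function name to the names it calls,
    # matching the {function_name: [callee, ...]} shape used in the diff.
    def build_call_graph(code: str) -> dict:
        tree = ast.parse(code)
        graph = {}
        for node in ast.walk(tree):
            if isinstance(node, ast.FunctionDef):
                graph[node.name] = [
                    child.func.id
                    for child in ast.walk(node)
                    if isinstance(child, ast.Call) and isinstance(child.func, ast.Name)
                ]
        return graph

    # Hypothetical helper: cache the pipeline so the model loads once per session.
    @st.cache_resource
    def load_summarizer():
        return pipeline("summarization", model="sshleifer/distilbart-cnn-12-6")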
@@ -82,42 +81,43 @@ if code.strip():
                 x2, y2, z2 = function_positions[callee]
                 function_data["relationships"].append({"start": [x1, y1, z1], "end": [x2, y2, z2]})

-        js = """
+        js = f"""
         <script>
-        const functionData =
+        const functionData = {str(function_data).replace("'", '"')};
+
+        AFRAME.registerComponent('dynamic-creator', {{
+          init: function () {{
+            functionData.functions.forEach(function(fn) {{
+              const box = document.createElement('a-box');
+              box.setAttribute('position', fn.position.join(' '));
+              box.setAttribute('depth', '0.5');
+              box.setAttribute('height', '0.5');
+              box.setAttribute('width', '2');
+              box.setAttribute('color', '#FFC65D');
+              box.setAttribute('class', 'clickable');
+              box.setAttribute('onclick', `say('${{fn.name}}')`);
+              this.el.appendChild(box);
+
+              const text = document.createElement('a-text');
+              text.setAttribute('value', fn.name);
+              text.setAttribute('position', `${{fn.position[0]}} ${{fn.position[1] + 1}} ${{fn.position[2]}}`);
+              text.setAttribute('align', 'center');
+              text.setAttribute('color', '#000');
+              this.el.appendChild(text);
+            }}.bind(this));
+
+            functionData.relationships.forEach(function(rel) {{
+              const line = document.createElement('a-entity');
+              line.setAttribute('line', `start: ${{rel.start.join(' ')}}; end: ${{rel.end.join(' ')}}; color: red`);
+              this.el.appendChild(line);
+            }}.bind(this));
+          }}
+        }});
+
+        function say(text) {{
         const utter = new SpeechSynthesisUtterance(text);
         speechSynthesis.speak(utter);
-        }
+        }}
         </script>
         """
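Because js is now an f-string, every literal brace in the JavaScript has to be doubled ({{ and }}), including the ${{...}} placeholders inside template literals, otherwise Python tries to format them. The JSON itself is produced with str(function_data).replace("'", '"'), which works for the identifier-and-number data built here but is fragile for anything containing quotes; a more robust variant, sketched with the standard json module rather than the commit's approach, would be:

    import json

    # json.dumps always emits valid JSON, which is also a valid JavaScript object
    # literal, so no quote substitution is needed.
    js = f"""
    <script>
    const functionData = {json.dumps(function_data)};
    </script>
    """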
@@ -126,18 +126,17 @@ if code.strip():
         <html>
         <head>
         <script src="https://aframe.io/releases/1.3.0/aframe.min.js"></script>
-        <script src="https://cdn.jsdelivr.net/gh/donmccurdy/aframe-extras@6.1.1/dist/aframe-extras.min.js"></script>
-        <script src="https://unpkg.com/aframe-screenshot-component@3.3.0/dist/aframe-screenshot-component.min.js"></script>
-        {js}
         </head>
         <body>
-        <a-scene
-        <a-entity position="0
+        <a-scene>
+        <a-entity dynamic-creator position="0 0 0"></a-entity>
+        <a-entity position="0 1.6 4">
+        <a-camera></a-camera>
         </a-entity>
         <a-light type="ambient" color="#FFF"></a-light>
         <a-plane rotation="-90 0 0" width="40" height="40" color="#7BC8A4"></a-plane>
         </a-scene>
+        {js}
         </body>
         </html>
         """
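The page above is embedded through components.iframe(data_url, height=600) in the next hunk, but the construction of data_url falls between hunks and is not shown; given the import base64 context on the first hunk header, it is presumably a base64-encoded data: URL. A minimal sketch, assuming the wrapped page is held in a variable such as full_html (the actual name is not visible in this diff):

    import base64
    import streamlit.components.v1 as components

    # Encode the generated page as a data: URL so it can be iframed without writing a file.
    encoded = base64.b64encode(full_html.encode("utf-8")).decode("utf-8")
    data_url = f"data:text/html;base64,{encoded}"
    components.iframe(data_url, height=600)

For a page of this size, components.html(full_html, height=600) would work as well and avoids the length limits some browsers place on data: URLs.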
@@ -150,23 +149,12 @@ if code.strip():
     st.subheader("🌐 Interactive 3D Function Visualizer")
     components.iframe(data_url, height=600)

-    #
+    # QR code for Hugging Face Space
     st.subheader("📱 AR View on Mobile")
-    )
-    space_url = "https://huggingface.co/spaces/your-space-name"  # Replace with actual Hugging Face Space URL
-    qr.add_data(space_url)
-    qr.make(fit=True)
-    img = qr.make_image(fill='black', back_color='white')
-    img_byte_arr = BytesIO()
-    img.save(img_byte_arr, format='PNG')
-    img_bytes = img_byte_arr.getvalue()
-    st.image(img_bytes, caption="Scan this QR code to view the VR scene in AR on your mobile!")
+    space_url = "https://huggingface.co/spaces/your-space-name"  # Replace with actual URL
+    qr = qrcode.make(space_url)
+    buf = BytesIO()
+    qr.save(buf, format="PNG")
+    st.image(buf.getvalue(), caption="Scan this QR code to view the VR scene in AR on your mobile!")
 else:
     st.info("Write some Python code above to visualize, narrate, and explore it in 3D/AR.")
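The QR-code block still points at a placeholder space_url. On Hugging Face Spaces the space id is normally exposed through the SPACE_ID environment variable, so the URL could be derived at runtime instead of hardcoded; note that this would mean keeping the os import this commit removes. A sketch, assuming SPACE_ID is set (it is not guaranteed outside the Spaces runtime):

    import os

    # SPACE_ID looks like "username/space-name" on Spaces; fall back to the placeholder locally.
    space_id = os.environ.get("SPACE_ID")
    space_url = (
        f"https://huggingface.co/spaces/{space_id}"
        if space_id
        else "https://huggingface.co/spaces/your-space-name"
    )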