xinjie.wang committed
Commit 43c5d2f · 1 Parent(s): c6daec8
app.py CHANGED
@@ -51,7 +51,7 @@ with gr.Blocks(delete_cache=(43200, 43200), theme=custom_theme) as demo:
             <a href="https://arxiv.org/abs/xxxx.xxxxx">
                 <img alt="📄 arXiv" src="https://img.shields.io/badge/📄-arXiv-b31b1b">
             </a>
-            <a href="https://github.com/horizon-research/EmbodiedGen">
+            <a href="https://github.com/HorizonRobotics/EmbodiedGen">
                 <img alt="💻 GitHub" src="https://img.shields.io/badge/GitHub-000000?logo=github">
             </a>
             <a href="https://www.youtube.com/watch?v=SnHhzHeb_aI">
@@ -68,7 +68,7 @@ with gr.Blocks(delete_cache=(43200, 43200), theme=custom_theme) as demo:
     )
 
     gr.HTML(image_css)
-    gr.HTML(lighting_css)
+    # gr.HTML(lighting_css)
     with gr.Row():
         with gr.Column(scale=2):
             with gr.Tabs() as input_tabs:

embodied_gen/data/backproject_v2.py CHANGED
@@ -606,7 +606,17 @@ def parse_args():
         "--delight", action="store_true", help="Use delighting model."
     )
     parser.add_argument(
-        "--smooth_texture", type=bool, default=True, help="Smooth the texture."
+        "--no_smooth_texture",
+        action="store_true",
+        help="Do not smooth the texture.",
+    )
+    parser.add_argument(
+        "--save_glb_path", type=str, default=None, help="Save glb path."
+    )
+    parser.add_argument(
+        "--no_save_delight_img",
+        action="store_true",
+        help="Disable saving delight image",
     )
 
     args, unknown = parser.parse_known_args()
@@ -642,7 +652,8 @@ def entrypoint(
         save_dir = os.path.dirname(args.output_path)
         os.makedirs(save_dir, exist_ok=True)
         color_grid = delight_model(color_grid)
-        color_grid.save(f"{save_dir}/color_grid_delight.png")
+        if not args.no_save_delight_img:
+            color_grid.save(f"{save_dir}/color_grid_delight.png")
 
     multiviews = get_images_from_grid(color_grid, img_size=512)
 
@@ -675,11 +686,15 @@
         view_weights=view_weights,
         render_wh=camera_params.resolution_hw,
         texture_wh=args.texture_wh,
-        smooth_texture=args.smooth_texture,
+        smooth_texture=not args.no_smooth_texture,
     )
 
     textured_mesh = texture_backer(multiviews, mesh, args.output_path)
 
+    if args.save_glb_path is not None:
+        os.makedirs(os.path.dirname(args.save_glb_path), exist_ok=True)
+        textured_mesh.export(args.save_glb_path)
+
     return textured_mesh
 
 
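For context only (not part of this commit): argparse options declared with type=bool pass the raw command-line string through bool(), so any non-empty value, including "False", parses as True; the new store_true opt-out flags avoid that. A minimal standalone sketch of the difference:

import argparse

# Sketch of the argparse pitfall that store_true flags sidestep.
parser = argparse.ArgumentParser()
parser.add_argument("--smooth_texture", type=bool, default=True)   # old style
parser.add_argument("--no_smooth_texture", action="store_true")    # new style

old = parser.parse_args(["--smooth_texture", "False"])
new = parser.parse_args(["--no_smooth_texture"])
print(old.smooth_texture)         # True, because bool("False") is True
print(not new.no_smooth_texture)  # False, the opt-out actually disables smoothing
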
embodied_gen/data/utils.py CHANGED
@@ -19,6 +19,7 @@ import math
 import os
 import random
 import zipfile
+from shutil import rmtree
 from typing import List, Tuple, Union
 
 import cv2
@@ -66,6 +67,7 @@ __all__ = [
     "gamma_shs",
     "resize_pil",
     "trellis_preprocess",
+    "delete_dir",
 ]
 
 
@@ -994,3 +996,14 @@ def zip_files(input_paths: list[str], output_zip: str) -> str:
             zipf.write(input_path, arcname=arcname)
 
     return output_zip
+
+
+def delete_dir(folder_path: str, keep_subs: list[str] = None) -> None:
+    for item in os.listdir(folder_path):
+        if keep_subs is not None and item in keep_subs:
+            continue
+        item_path = os.path.join(folder_path, item)
+        if os.path.isdir(item_path):
+            rmtree(item_path)
+        else:
+            os.remove(item_path)
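
Usage sketch for the new helper (the path below is a hypothetical example): prune an output folder while keeping its final result subdirectory, which is how imageto3d.py uses it further down.

from embodied_gen.data.utils import delete_dir

# Remove everything under the (hypothetical) asset folder except "result".
delete_dir("outputs/demo_asset", keep_subs=["result"])
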
embodied_gen/scripts/imageto3d.py CHANGED
@@ -20,12 +20,13 @@ import logging
 import os
 import sys
 from glob import glob
+from shutil import copy, copytree
 
 import numpy as np
 import trimesh
 from PIL import Image
 from embodied_gen.data.backproject_v2 import entrypoint as backproject_api
-from embodied_gen.data.utils import trellis_preprocess
+from embodied_gen.data.utils import delete_dir, trellis_preprocess
 from embodied_gen.models.delight_model import DelightingModel
 from embodied_gen.models.gs_model import GaussianOperator
 from embodied_gen.models.segment_model import (
@@ -97,9 +98,6 @@ def parse_args():
         required=True,
         help="Root directory for saving outputs.",
     )
-    parser.add_argument(
-        "--no_mesh", action="store_true", help="Do not output mesh files."
-    )
     parser.add_argument(
         "--height_range",
         type=str,
@@ -116,6 +114,7 @@
     parser.add_argument("--skip_exists", action="store_true")
     parser.add_argument("--strict_seg", action="store_true")
     parser.add_argument("--version", type=str, default=VERSION)
+    parser.add_argument("--remove_intermediate", type=bool, default=True)
     args = parser.parse_args()
 
     assert (
@@ -136,7 +135,7 @@ if __name__ == "__main__":
         try:
             filename = os.path.basename(image_path).split(".")[0]
             output_root = args.output_root
-            if args.image_root is not None:
+            if args.image_root is not None or len(args.image_path) > 1:
                 output_root = os.path.join(output_root, filename)
             os.makedirs(output_root, exist_ok=True)
 
@@ -189,117 +188,121 @@
             video_path = os.path.join(output_root, "gs_mesh.mp4")
             merge_images_video(color_images, normal_images, video_path)
 
-            if not args.no_mesh:
-                # Save the raw Gaussian model
-                gs_path = mesh_out.replace(".obj", "_gs.ply")
-                gs_model.save_ply(gs_path)
-
-                # Rotate mesh and GS by 90 degrees around Z-axis.
-                rot_matrix = [[0, 0, -1], [0, 1, 0], [1, 0, 0]]
-                gs_add_rot = [[1, 0, 0], [0, -1, 0], [0, 0, -1]]
-                mesh_add_rot = [[1, 0, 0], [0, 0, -1], [0, 1, 0]]
-
-                # Addtional rotation for GS to align mesh.
-                gs_rot = np.array(gs_add_rot) @ np.array(rot_matrix)
-                pose = GaussianOperator.trans_to_quatpose(gs_rot)
-                aligned_gs_path = gs_path.replace(".ply", "_aligned.ply")
-                GaussianOperator.resave_ply(
-                    in_ply=gs_path,
-                    out_ply=aligned_gs_path,
-                    instance_pose=pose,
-                    device="cpu",
-                )
-                color_path = os.path.join(output_root, "color.png")
-                render_gs_api(aligned_gs_path, color_path)
-
-                mesh = trimesh.Trimesh(
-                    vertices=mesh_model.vertices.cpu().numpy(),
-                    faces=mesh_model.faces.cpu().numpy(),
-                )
-                mesh.vertices = mesh.vertices @ np.array(mesh_add_rot)
-                mesh.vertices = mesh.vertices @ np.array(rot_matrix)
-
-                mesh_obj_path = os.path.join(output_root, f"{filename}.obj")
-                mesh.export(mesh_obj_path)
-
-                mesh = backproject_api(
-                    delight_model=DELIGHT,
-                    imagesr_model=IMAGESR_MODEL,
-                    color_path=color_path,
-                    mesh_path=mesh_obj_path,
-                    output_path=mesh_obj_path,
-                    skip_fix_mesh=False,
-                    delight=True,
-                    texture_wh=[2048, 2048],
+            # Save the raw Gaussian model
+            gs_path = mesh_out.replace(".obj", "_gs.ply")
+            gs_model.save_ply(gs_path)
+
+            # Rotate mesh and GS by 90 degrees around Z-axis.
+            rot_matrix = [[0, 0, -1], [0, 1, 0], [1, 0, 0]]
+            gs_add_rot = [[1, 0, 0], [0, -1, 0], [0, 0, -1]]
+            mesh_add_rot = [[1, 0, 0], [0, 0, -1], [0, 1, 0]]
+
+            # Addtional rotation for GS to align mesh.
+            gs_rot = np.array(gs_add_rot) @ np.array(rot_matrix)
+            pose = GaussianOperator.trans_to_quatpose(gs_rot)
+            aligned_gs_path = gs_path.replace(".ply", "_aligned.ply")
+            GaussianOperator.resave_ply(
+                in_ply=gs_path,
+                out_ply=aligned_gs_path,
+                instance_pose=pose,
+                device="cpu",
+            )
+            color_path = os.path.join(output_root, "color.png")
+            render_gs_api(aligned_gs_path, color_path)
+
+            mesh = trimesh.Trimesh(
+                vertices=mesh_model.vertices.cpu().numpy(),
+                faces=mesh_model.faces.cpu().numpy(),
+            )
+            mesh.vertices = mesh.vertices @ np.array(mesh_add_rot)
+            mesh.vertices = mesh.vertices @ np.array(rot_matrix)
+
+            mesh_obj_path = os.path.join(output_root, f"{filename}.obj")
+            mesh.export(mesh_obj_path)
+
+            mesh = backproject_api(
+                delight_model=DELIGHT,
+                imagesr_model=IMAGESR_MODEL,
+                color_path=color_path,
+                mesh_path=mesh_obj_path,
+                output_path=mesh_obj_path,
+                skip_fix_mesh=False,
+                delight=True,
+                texture_wh=[2048, 2048],
+            )
+
+            mesh_glb_path = os.path.join(output_root, f"{filename}.glb")
+            mesh.export(mesh_glb_path)
+
+            urdf_convertor = URDFGenerator(GPT_CLIENT, render_view_num=4)
+            asset_attrs = {
+                "version": VERSION,
+                "gs_model": f"{urdf_convertor.output_mesh_dir}/{filename}_gs.ply",
+            }
+            if args.height_range:
+                min_height, max_height = map(
+                    float, args.height_range.split("-")
                 )
-
-                mesh_glb_path = os.path.join(output_root, f"{filename}.glb")
-                mesh.export(mesh_glb_path)
-
-                urdf_convertor = URDFGenerator(GPT_CLIENT, render_view_num=4)
-                asset_attrs = {
-                    "version": VERSION,
-                    "gs_model": f"{urdf_convertor.output_mesh_dir}/{filename}_gs.ply",
-                }
-                if args.height_range:
-                    min_height, max_height = map(
-                        float, args.height_range.split("-")
-                    )
-                    asset_attrs["min_height"] = min_height
-                    asset_attrs["max_height"] = max_height
-                if args.mass_range:
-                    min_mass, max_mass = map(float, args.mass_range.split("-"))
-                    asset_attrs["min_mass"] = min_mass
-                    asset_attrs["max_mass"] = max_mass
-                if args.asset_type:
-                    asset_attrs["category"] = args.asset_type
-                if args.version:
-                    asset_attrs["version"] = args.version
-
-                urdf_path = urdf_convertor(
-                    mesh_path=mesh_obj_path,
-                    output_root=f"{output_root}/URDF_{filename}",
-                    **asset_attrs,
-                )
-
-                # Rescale GS and save to URDF/mesh folder.
-                real_height = urdf_convertor.get_attr_from_urdf(
-                    urdf_path, attr_name="real_height"
-                )
-                out_gs = f"{output_root}/URDF_{filename}/{urdf_convertor.output_mesh_dir}/{filename}_gs.ply"  # noqa
-                GaussianOperator.resave_ply(
-                    in_ply=aligned_gs_path,
-                    out_ply=out_gs,
-                    real_height=real_height,
-                    device="cpu",
-                )
-
-                # Quality check and update .urdf file.
-                mesh_out = f"{output_root}/URDF_{filename}/{urdf_convertor.output_mesh_dir}/{filename}.obj"  # noqa
-                trimesh.load(mesh_out).export(mesh_out.replace(".obj", ".glb"))
-                # image_paths = render_asset3d(
-                #     mesh_path=mesh_out,
-                #     output_root=f"{output_root}/URDF_{filename}",
-                #     output_subdir="qa_renders",
-                #     num_images=8,
-                #     elevation=(30, -30),
-                #     distance=5.5,
-                # )
-
-                image_dir = f"{output_root}/URDF_{filename}/{urdf_convertor.output_render_dir}/image_color"  # noqa
-                image_paths = glob(f"{image_dir}/*.png")
-                images_list = []
-                for checker in CHECKERS:
-                    images = image_paths
-                    if isinstance(checker, ImageSegChecker):
-                        images = [
-                            f"{output_root}/{filename}_raw.png",
-                            f"{output_root}/{filename}_cond.png",
-                        ]
-                    images_list.append(images)
-
-                results = BaseChecker.validate(CHECKERS, images_list)
-                urdf_convertor.add_quality_tag(urdf_path, results)
+                asset_attrs["min_height"] = min_height
+                asset_attrs["max_height"] = max_height
+            if args.mass_range:
+                min_mass, max_mass = map(float, args.mass_range.split("-"))
+                asset_attrs["min_mass"] = min_mass
+                asset_attrs["max_mass"] = max_mass
+            if args.asset_type:
+                asset_attrs["category"] = args.asset_type
+            if args.version:
+                asset_attrs["version"] = args.version
+
+            urdf_root = f"{output_root}/URDF_{filename}"
+            urdf_path = urdf_convertor(
+                mesh_path=mesh_obj_path,
+                output_root=urdf_root,
+                **asset_attrs,
+            )
+
+            # Rescale GS and save to URDF/mesh folder.
+            real_height = urdf_convertor.get_attr_from_urdf(
+                urdf_path, attr_name="real_height"
+            )
+            out_gs = f"{urdf_root}/{urdf_convertor.output_mesh_dir}/{filename}_gs.ply"  # noqa
+            GaussianOperator.resave_ply(
+                in_ply=aligned_gs_path,
+                out_ply=out_gs,
+                real_height=real_height,
+                device="cpu",
+            )
+
+            # Quality check and update .urdf file.
+            mesh_out = f"{urdf_root}/{urdf_convertor.output_mesh_dir}/{filename}.obj"  # noqa
+            trimesh.load(mesh_out).export(mesh_out.replace(".obj", ".glb"))
+
+            image_dir = f"{urdf_root}/{urdf_convertor.output_render_dir}/image_color"  # noqa
+            image_paths = glob(f"{image_dir}/*.png")
+            images_list = []
+            for checker in CHECKERS:
+                images = image_paths
+                if isinstance(checker, ImageSegChecker):
+                    images = [
+                        f"{output_root}/{filename}_raw.png",
+                        f"{output_root}/{filename}_cond.png",
+                    ]
+                images_list.append(images)
+
+            results = BaseChecker.validate(CHECKERS, images_list)
+            urdf_convertor.add_quality_tag(urdf_path, results)
+
+            # Organize the final result files
+            result_dir = f"{output_root}/result"
+            os.makedirs(result_dir, exist_ok=True)
+            copy(urdf_path, f"{result_dir}/{os.path.basename(urdf_path)}")
+            copytree(
+                f"{urdf_root}/{urdf_convertor.output_mesh_dir}",
+                f"{result_dir}/{urdf_convertor.output_mesh_dir}",
+            )
+            copy(video_path, f"{result_dir}/video.mp4")
+            if args.remove_intermediate:
+                delete_dir(output_root, keep_subs=["result"])
 
         except Exception as e:
             logger.error(f"Failed to process {image_path}: {e}, skip.")
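
With the no_mesh branch removed, every run now produces the mesh, URDF and Gaussian-splat outputs, and by default only each asset's result/ folder (URDF file, mesh directory, video.mp4) is kept. A hypothetical invocation sketch; the flag names --image_path and --output_root are inferred from args.image_path and args.output_root in the diff, and the image paths are placeholders:

import subprocess

# Two inputs, so each asset gets its own subfolder under outputs/imageto3d,
# and intermediates are pruned because --remove_intermediate defaults to True.
subprocess.run(
    [
        "python", "embodied_gen/scripts/imageto3d.py",
        "--image_path", "path/to/banana.png", "path/to/mug.png",
        "--output_root", "outputs/imageto3d",
    ],
    check=True,
)
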
embodied_gen/scripts/texture_gen.sh CHANGED
@@ -56,8 +56,10 @@ python embodied_gen/scripts/render_mv.py \
 backproject-cli --mesh_path ${mesh_path} \
     --color_path ${output_root}/multi_view/color_sample0.png \
     --output_path "${output_root}/texture_mesh/${uuid}.obj" \
+    --save_glb_path "${output_root}/texture_mesh/${uuid}.glb" \
     --skip_fix_mesh \
-    --delight
+    --delight \
+    --no_save_delight_img
 
 # Step 4: final rendering of textured mesh
 drender-cli --mesh_path "${output_root}/texture_mesh/${uuid}.obj" \
@@ -67,3 +69,12 @@ drender-cli --mesh_path "${output_root}/texture_mesh/${uuid}.obj" \
     --with_mtl \
     --gen_color_mp4 \
     --pbr_light_factor 1.2
+
+# Organize folders
+rm -rf ${output_root}/condition
+video_path="${output_root}/texture_mesh/${uuid}/color.mp4"
+if [ -f "${video_path}" ]; then
+    cp "${video_path}" "${output_root}/texture_mesh/color.mp4"
+    echo "Resave video to ${output_root}/texture_mesh/color.mp4"
+fi
+rm -rf ${output_root}/texture_mesh/${uuid}
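
A small post-run check, offered only as a sketch (the output_root and uuid values are placeholders standing in for the script's variables): the textured .obj, the new .glb export and the promoted preview video are the artifacts this step is expected to leave behind.

import os

output_root = "outputs/texture_gen/demo"  # placeholder for ${output_root}
uuid = "demo_mesh"                        # placeholder for ${uuid}
for rel in [
    f"texture_mesh/{uuid}.obj",
    f"texture_mesh/{uuid}.glb",
    "texture_mesh/color.mp4",
]:
    path = os.path.join(output_root, rel)
    print(f"{path}: {'ok' if os.path.exists(path) else 'missing'}")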