notso-glb 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,647 @@
1
+ "GLB/glTF import and export functions."
2
+
3
+ import os
4
+ from dataclasses import dataclass
5
+ from pathlib import Path
6
+ from typing import Any, cast
7
+
8
+ import bpy
9
+
10
+ from notso_glb.analyzers import (
11
+ analyze_bone_animation,
12
+ analyze_duplicate_names,
13
+ analyze_mesh_bloat,
14
+ analyze_skinned_mesh_parents,
15
+ analyze_unused_uv_maps,
16
+ )
17
+ from notso_glb.cleaners import (
18
+ auto_fix_bloat,
19
+ auto_fix_duplicate_names,
20
+ clean_vertex_groups,
21
+ delete_bone_shape_objects,
22
+ mark_static_bones_non_deform,
23
+ remove_unused_uv_maps,
24
+ resize_textures,
25
+ )
26
+ from notso_glb.utils import get_scene_stats
27
+ from notso_glb.utils.logging import (
28
+ StepTimer,
29
+ bold,
30
+ bright_cyan,
31
+ bright_green,
32
+ bright_red,
33
+ bright_yellow,
34
+ cyan,
35
+ dim,
36
+ format_bytes,
37
+ format_duration,
38
+ green,
39
+ log_detail,
40
+ log_error,
41
+ log_info,
42
+ log_ok,
43
+ log_warn,
44
+ magenta,
45
+ print_header,
46
+ timed,
47
+ yellow,
48
+ )
49
+
50
+
51
@dataclass
class ExportConfig:
    """Configuration for GLB export and optimization."""

    # Destination file; None -> derived from the .blend path (or cwd) in _export_file
    output_path: Path | None = None
    # 'GLB', 'GLTF_SEPARATE', or 'GLTF_EMBEDDED' (passed straight to the exporter)
    export_format: str = "GLB"
    # Attempt Draco mesh compression first; pipeline falls back if the encoder crashes
    use_draco: bool = True
    # Export textures as WebP instead of letting Blender pick ("AUTO")
    use_webp: bool = True
    # Resize textures larger than this many pixels; 0 disables resizing
    max_texture_size: int = 1024
    # Force power-of-two texture dimensions during resize
    force_pot_textures: bool = False
    # Analyze bones for static vs. animated and mark static ones non-deform
    analyze_animations: bool = True
    # Run the mesh-complexity (bloat) analyzer
    check_bloat: bool = True
    # Gate for the EXPERIMENTAL auto-fix phases (decimation, renames, UV removal)
    experimental_autofix: bool = False
    # Suppress Blender's verbose stdout via filter_blender_output
    quiet: bool = False
65
+
66
+
67
def import_gltf(filepath: str, quiet: bool = False) -> None:
    """Load a GLB/glTF file into the current Blender scene.

    Thin public wrapper around ``_do_import_gltf`` that runs standalone,
    i.e. without a ``StepTimer`` driving pipeline progress output.
    """
    _do_import_gltf(filepath, quiet=quiet, step=None)
70
+
71
+
72
def _do_import_gltf(
    filepath: str, quiet: bool = False, step: StepTimer | None = None
) -> None:
    """Import GLB/glTF file into Blender scene.

    Clears the current scene first, then runs Blender's glTF importer.

    Args:
        filepath: Path to GLB/glTF file
        quiet: Suppress verbose Blender output
        step: Optional StepTimer for progress tracking

    Raises:
        ValueError: If the file extension is not .glb or .gltf.
    """
    # Local import: filter_blender_output is not part of this module's
    # top-level logging imports.
    from notso_glb.utils.logging import filter_blender_output

    # Remove everything currently in the scene so the import starts clean.
    with timed("Clearing scene", print_on_exit=False):
        bpy.ops.object.select_all(action="SELECT")
        bpy.ops.object.delete()

    ext = os.path.splitext(filepath)[1].lower()
    if ext not in (".glb", ".gltf"):
        raise ValueError(f"Unsupported format: {ext}")

    # Log level: 0=all, 10=DEBUG, 20=INFO, 30=WARNING, 40=ERROR, 50=CRITICAL
    log_level = 30 if quiet else 0  # WARNING level when quiet

    # Progress output goes through the StepTimer when part of the pipeline,
    # otherwise it is logged directly.
    if step:
        step.step("Importing into Blender...")
        log_detail(dim(os.path.basename(filepath)))
    else:
        log_info(f"Importing {cyan(os.path.basename(filepath))}...")

    # When quiet, wrap the importer call to filter Blender's stdout noise.
    if quiet:
        with filter_blender_output():
            with timed("glTF import", print_on_exit=False) as t:
                bpy.ops.import_scene.gltf(filepath=filepath, loglevel=log_level)
    else:
        with timed("glTF import") as t:
            bpy.ops.import_scene.gltf(filepath=filepath, loglevel=log_level)

    # `t` is bound by whichever branch ran above.
    msg = f"Imported in {bright_cyan(format_duration(t.elapsed))}"
    if step:
        log_detail(msg)
    else:
        log_ok(msg)
114
+
115
+
116
def _analyze_bloat(step: StepTimer, config: ExportConfig) -> None:
    """Analyze and optionally fix mesh bloat.

    Args:
        step: Pipeline step timer for progress reporting.
        config: Export configuration; ``check_bloat`` gates the analysis
            and ``experimental_autofix`` gates the auto-fix pipeline.
    """
    if not config.check_bloat:
        step.step("Skipping bloat check")
        log_detail(dim("--skip-bloat-check flag set"))
        return

    step.step("Analyzing mesh complexity...")

    with timed("Bloat analysis", print_on_exit=False) as t:
        bloat_warnings = analyze_mesh_bloat()

    if not bloat_warnings:
        log_detail(
            f"{green('No bloat issues detected')} {dim(f'({format_duration(t.elapsed)})')}"
        )
        return

    critical_count = sum(1 for w in bloat_warnings if w["severity"] == "CRITICAL")
    warning_count = sum(1 for w in bloat_warnings if w["severity"] == "WARNING")

    # Hoist the border color choice (red if any CRITICAL, yellow otherwise).
    # This conditional was previously duplicated at three print sites.
    border = "!" * 60
    colored_border = bright_red(border) if critical_count > 0 else bright_yellow(border)

    # Print warning box header
    if critical_count > 0:
        print(f"\n{colored_border}")
        print(
            f" {bright_red('BLOAT WARNINGS')}: {bright_red(str(critical_count))} critical, {yellow(str(warning_count))} warnings"
        )
    else:
        print(f"\n{colored_border}")
        print(
            f" {bright_yellow('BLOAT WARNINGS')}: {yellow(str(warning_count))} warnings"
        )
    print(colored_border)

    # One entry per offending object: icon + tagged name, detail, suggestion.
    for w in bloat_warnings:
        if w["severity"] == "CRITICAL":
            icon = bright_red("!!!")
            obj_str = bright_red(f"[{w['issue']}]") + f" {w['object']}"
        else:
            icon = yellow(" ! ")
            obj_str = yellow(f"[{w['issue']}]") + f" {w['object']}"
        print(f" {icon} {obj_str}")
        print(f" {dim(str(w['detail']))}")
        print(f" {dim('->')} {w['suggestion']}")

    print(colored_border)

    if config.experimental_autofix:
        _run_bloat_autofix(bloat_warnings)
    else:
        print(
            f" {dim('Fix these in Blender before export for optimal web delivery.')}"
        )
        print(f" {dim('Or use')} {cyan('--autofix')} {dim('to auto-decimate props.')}")

    print(f"{colored_border}\n")
173
+
174
+
175
def _run_bloat_autofix(bloat_warnings: list[dict]) -> None:
    """Run the experimental two-phase bloat auto-fix (cleanup, then decimation)."""
    print(f"\n {magenta('[EXPERIMENTAL]')} Running auto-fix pipeline...")

    with timed("Auto-fix pipeline", print_on_exit=False) as timer:
        results = auto_fix_bloat(bloat_warnings)

    cleanup_entries = results["cleanup"]
    if cleanup_entries:
        print(f"\n {cyan('Phase 1: BMesh Cleanup')}")
        total_saved = 0
        for entry in cleanup_entries:
            # Build the per-object detail list from the non-zero counters.
            details = [
                f"{entry[key]} {label}"
                for key, label in (
                    ("doubles", "doubles"),
                    ("degenerate", "degenerate"),
                    ("loose", "loose"),
                )
                if entry[key]
            ]
            raw_saved = entry["verts_saved"]
            saved_int = int(raw_saved) if isinstance(raw_saved, (int, float)) else 0
            total_saved += saved_int
            print(
                f" {entry['object']}: {', '.join(details)} ({bright_green(f'-{saved_int}')} verts)"
            )
        print(
            f" {bold('Cleanup total')}: {bright_green(f'{total_saved:,}')} verts removed"
        )
    else:
        print(f"\n {cyan('Phase 1')}: {dim('No cleanup needed')}")

    decimation_entries = results["decimation"]
    if decimation_entries:
        print(f"\n {cyan('Phase 2: Decimation')}")
        for fix in decimation_entries:
            reduction_str = bright_green(f"-{fix['reduction']:.0f}%")
            print(
                f" {fix['object']}: "
                f"{fix['original']:,} -> {fix['new']:,} verts "
                f"({reduction_str})"
            )
        print(f" {bold('Decimated')} {len(decimation_entries)} mesh(es)")
    else:
        print(f"\n {cyan('Phase 2')}: {dim('No decimation needed')}")

    print(f" {dim(f'Auto-fix completed in {format_duration(timer.elapsed)}')}")
219
+
220
+
221
def _check_duplicates(step: StepTimer, config: ExportConfig) -> None:
    """Report duplicate object names, renaming them when autofix is enabled."""
    step.step("Checking for duplicate names...")

    with timed("Duplicate name check", print_on_exit=False) as check_timer:
        duplicates = analyze_duplicate_names()

    if not duplicates:
        log_detail(
            f"{green('No duplicate names found')} {dim(f'({format_duration(check_timer.elapsed)})')}"
        )
        return

    border = "#" * 60
    print(f"\n{yellow(border)}")
    print(f" {yellow('DUPLICATE NAME WARNINGS')}: {len(duplicates)} found")
    print(f"{yellow(border)}")

    for record in duplicates:
        if record.get("issue", "DUPLICATE") == "SANITIZATION_COLLISION":
            print(f" [{record['type']}] {bright_yellow('COLLISION')}: {record['name']}")
        else:
            print(f" [{record['type']}] '{record['name']}' x{record['count']}")

    if config.experimental_autofix:
        print(f"\n {magenta('[EXPERIMENTAL]')} Auto-fixing duplicate names...")
        with timed("Rename duplicates", print_on_exit=False) as rename_timer:
            renames = auto_fix_duplicate_names(duplicates)
        for entry in renames:
            print(f" [{entry['type']}] {entry['old']} -> {green(entry['new'])}")
        if renames:
            print(
                f" {bold('Renamed')} {len(renames)} item(s) {dim(f'({format_duration(rename_timer.elapsed)})')}"
            )
        else:
            log_detail(dim("No renames needed"))
    else:
        print(
            f"\n {dim('These will cause JS identifier collisions in generated components.')}"
        )
        print(
            f" {dim('Fix in Blender or use')} {cyan('--autofix')} {dim('to auto-rename.')}"
        )

    print(f"{yellow(border)}\n")
267
+
268
+
269
def _check_skinned_meshes(step: StepTimer) -> None:
    """Check skinned mesh hierarchy.

    Warns when skinned meshes sit under parents with transforms, since
    parent transforms do not affect skinned meshes in glTF. Warnings are
    grouped by parent and displayed in columns.

    Args:
        step: Pipeline step timer for progress reporting.
    """
    step.step("Checking skinned mesh hierarchy...")

    with timed("Skinned mesh check", print_on_exit=False) as t:
        skinned_warnings = analyze_skinned_mesh_parents()

    if not skinned_warnings:
        log_detail(
            f"{green('All skinned meshes OK')} {dim(f'({format_duration(t.elapsed)})')}"
        )
        return

    critical = [w for w in skinned_warnings if w["severity"] == "CRITICAL"]
    info_only = [w for w in skinned_warnings if w["severity"] != "CRITICAL"]

    border = "~" * 60
    print(f"\n{cyan(border)}")
    print(
        f" {cyan('SKINNED MESH WARNINGS')}: "
        f"{bright_red(str(len(critical)))} critical, "
        f"{dim(str(len(info_only)))} info"
    )
    print(f"{cyan(border)}")
    print(f" {dim('(Parent transforms do not affect skinned meshes in glTF)')}")

    # Group by parent name; setdefault replaces the manual key-presence check.
    by_parent: dict[str, list[dict]] = {}
    for w in skinned_warnings:
        by_parent.setdefault(str(w["parent"]), []).append(w)

    # Display grouped by parent in columns
    col_width = 20
    num_cols = 3

    for parent, items in sorted(by_parent.items()):
        # Color the parent red when any mesh under it is critical
        has_critical = any(w["severity"] == "CRITICAL" for w in items)
        parent_color = bright_red if has_critical else dim
        print(f"\n parent: {parent_color(parent)} ({len(items)})")

        # Sort items by mesh name for stable output
        sorted_items = sorted(items, key=lambda w: str(w["mesh"]))

        # Build display cells: severity icon + name truncated to the column.
        # NOTE(review): the `:<` padding counts ANSI escape characters in
        # colored names, so colored cells may align slightly off — cosmetic
        # only; preserved as-is.
        display_items: list[str] = []
        for w in sorted_items:
            mesh_name = str(w["mesh"])
            if w["severity"] == "CRITICAL":
                icon = bright_red("!!!")
                name = bright_red(mesh_name[: col_width - 5])
            elif w["has_transform"]:
                icon = yellow(" ! ")
                name = yellow(mesh_name[: col_width - 5])
            else:
                icon = dim(" i ")
                name = mesh_name[: col_width - 5]
            display_items.append(f"{icon} {name:<{col_width - 5}}")

        # Print in rows of num_cols cells
        for i in range(0, len(display_items), num_cols):
            row = display_items[i : i + num_cols]
            print(f" {''.join(row)}")

    print(f"{cyan(border)}")
    print(f" {dim('To fix: Apply parent transforms or reparent to scene root')}")
    print()
339
+
340
+
341
def _check_uv_maps(step: StepTimer, config: ExportConfig) -> None:
    """Report unused UV maps, removing them when autofix is enabled."""
    step.step("Checking for unused UV maps...")

    with timed("UV map check", print_on_exit=False) as check_timer:
        unused_uv_warnings = analyze_unused_uv_maps()

    if not unused_uv_warnings:
        log_detail(
            f"{green('No unused UV maps found')} {dim(f'({format_duration(check_timer.elapsed)})')}"
        )
        return

    # Count every unused UV layer across all flagged meshes.
    total_unused = 0
    for warning in unused_uv_warnings:
        total_unused += len(cast(list[str], warning["unused_uvs"]))

    border = "-" * 60
    print(f"\n{dim(border)}")
    print(
        f" {yellow('UNUSED UV MAPS')}: {total_unused} found in {len(unused_uv_warnings)} mesh(es)"
    )
    print(f"{dim(border)}")

    for warning in unused_uv_warnings:
        unused_uvs = cast(list[str], warning["unused_uvs"])
        total_uvs = cast(int, warning["total_uvs"])
        print(
            f" {warning['mesh']}: {yellow(str(unused_uvs))} {dim(f'(keeping {total_uvs - len(unused_uvs)})')}"
        )

    if config.experimental_autofix:
        print(f"\n {magenta('[EXPERIMENTAL]')} Removing unused UV maps...")
        with timed("Remove UVs", print_on_exit=False) as fix_timer:
            removed = remove_unused_uv_maps(unused_uv_warnings)
        print(
            f" {bold('Removed')} {bright_green(str(removed))} unused UV map(s) {dim(f'({format_duration(fix_timer.elapsed)})')}"
        )
    else:
        print(f"\n {dim('These cause UNUSED_OBJECT warnings in glTF validation.')}")
        print(f" {dim('Use')} {cyan('--autofix')} {dim('to auto-remove them.')}")

    print(f"{dim(border)}\n")
384
+
385
+
386
def _clean_and_optimize(step: StepTimer, config: ExportConfig) -> None:
    """Run cleaning and optimization steps.

    In order: delete bone-shape helper objects, remove empty vertex
    groups, (optionally) mark static bones non-deform, and (optionally)
    downscale oversized textures.

    Args:
        step: Pipeline step timer for progress reporting.
        config: Export configuration; ``analyze_animations``,
            ``max_texture_size`` and ``force_pot_textures`` gate the
            optional phases.
    """
    # Clean bone shapes
    step.step("Cleaning bone shape objects...")
    with timed("Delete bone shapes", print_on_exit=False) as t:
        deleted = delete_bone_shape_objects()
    log_detail(
        f"Deleted {bright_cyan(str(deleted))} objects {dim(f'({format_duration(t.elapsed)})')}"
    )

    # Clean vertex groups
    step.step("Cleaning unused vertex groups...")
    with timed("Clean vertex groups", print_on_exit=False) as t:
        removed_vg = clean_vertex_groups()
    log_detail(
        f"Removed {bright_cyan(f'{removed_vg:,}')} empty vertex groups {dim(f'({format_duration(t.elapsed)})')}"
    )

    # Analyze and mark static bones
    if config.analyze_animations:
        step.step("Analyzing bone animations...")
        # This timer prints its own duration on exit; its binding was
        # previously captured as `t` but never used, so it is dropped.
        with timed("Bone animation analysis"):
            static_bones = analyze_bone_animation()
        with timed("Mark non-deform bones", print_on_exit=False) as t2:
            marked, skipped = mark_static_bones_non_deform(static_bones)
        log_detail(f"Found {cyan(str(len(static_bones)))} static bones")
        log_detail(
            f"Marked {bright_green(str(marked))} as non-deform, kept {yellow(str(skipped))} for skinning {dim(f'({format_duration(t2.elapsed)})')}"
        )
    else:
        step.step("Skipping animation analysis")
        log_detail(dim("--skip-animation-analysis flag set"))

    # Resize textures
    if config.max_texture_size > 0 or config.force_pot_textures:
        pot_msg = f" {magenta('(forcing POT)')}" if config.force_pot_textures else ""
        step.step(f"Resizing textures > {config.max_texture_size}px{pot_msg}")
        with timed("Texture resize", print_on_exit=False) as t:
            resized = resize_textures(
                config.max_texture_size, force_pot=config.force_pot_textures
            )
        if resized > 0:
            log_detail(
                f"Resized {bright_cyan(str(resized))} textures {dim(f'({format_duration(t.elapsed)})')}"
            )
        else:
            log_detail(
                f"{green('No textures needed resizing')} {dim(f'({format_duration(t.elapsed)})')}"
            )
    else:
        step.step("Skipping texture resize")
        log_detail(dim("max_texture_size=0"))
438
+
439
+
440
def _do_export(output_path: str, config: ExportConfig, use_draco: bool) -> None:
    """Execute the actual glTF export call.

    Args:
        output_path: Destination file path passed to the exporter.
        config: Export configuration (format and WebP toggle are read here).
        use_draco: Whether to enable Draco mesh compression for this call.
    """
    export_params: dict[str, Any] = {
        "filepath": output_path,
        "export_format": config.export_format,
        # Bones: deform only
        "export_def_bones": True,
        "export_hierarchy_flatten_bones": False,
        "export_leaf_bone": False,
        # Animation optimization
        "export_animations": True,
        "export_nla_strips": True,
        "export_optimize_animation_size": True,
        "export_optimize_animation_keep_anim_armature": True,
        "export_force_sampling": True,
        "export_frame_step": 1,
        "export_skins": True,
        # Mesh compression (Draco) — quantization values are bits per attribute
        "export_draco_mesh_compression_enable": use_draco,
        "export_draco_mesh_compression_level": 6,
        "export_draco_position_quantization": 14,
        "export_draco_normal_quantization": 10,
        "export_draco_texcoord_quantization": 12,
        # Textures
        "export_image_format": "WEBP" if config.use_webp else "AUTO",
        # Standard settings
        "export_yup": True,
        "export_texcoords": True,
        "export_normals": True,
        "export_materials": "EXPORT",
        "export_shared_accessors": True,
    }

    # Blender 5.0+ requires export_loglevel for proper logging initialization
    # The addon only sets internal 'loglevel' when export_loglevel < 0
    if bpy.app.version >= (5, 0, 0):
        export_params["export_loglevel"] = -1

    bpy.ops.export_scene.gltf(**export_params)  # pyright: ignore[reportCallIssue]
479
+
480
+
481
def _try_export(output_path: str, config: ExportConfig, use_draco: bool) -> str | None:
    """Single export attempt. Returns path on success, None on failure.

    Args:
        output_path: Absolute destination path for the exported file.
        config: Export configuration (``quiet`` controls output filtering).
        use_draco: Whether to enable Draco compression for this attempt.
    """
    # Local import: filter_blender_output is not in the module-level logging
    # imports. (log_error already is, so the redundant re-import was removed.)
    from notso_glb.utils.logging import filter_blender_output

    try:
        if config.quiet:
            with filter_blender_output():
                _do_export(output_path, config, use_draco)
        else:
            _do_export(output_path, config, use_draco)
        return output_path
    except Exception as e:
        # Broad catch is deliberate: the Draco encoder can crash the export
        # operator, and the caller falls back to a non-Draco attempt.
        log_error(f"Export exception: {e}")
        return None
495
+
496
+
497
def _export_file(step: StepTimer, config: ExportConfig) -> str | None:
    """Export with automatic Draco fallback on encoder crash.

    Resolves the destination path (explicit config path, else next to the
    .blend file, else the current working directory), then attempts the
    export: with Draco first when enabled, retrying without it on failure.

    Args:
        step: Pipeline step timer for progress reporting.
        config: Export configuration.

    Returns:
        The output path on success, or None if every attempt failed.
    """
    # Resolve the destination path.
    output_path_str: str
    if config.output_path is None:
        blend_path = bpy.data.filepath
        if blend_path:
            base = os.path.splitext(blend_path)[0]
            output_path_str = f"{base}_optimized.glb"
        else:
            # Unsaved scene: fall back to the current working directory.
            output_path_str = os.path.join(os.getcwd(), "optimized_export.glb")
    else:
        output_path_str = str(config.output_path)

    output_path_str = bpy.path.abspath(output_path_str)
    ext = "GLB" if config.export_format == "GLB" else "glTF"
    step.step(f"Exporting {ext}...")
    log_detail(dim(output_path_str))

    draco_str = bright_green("ON") if config.use_draco else dim("OFF")
    webp_str = bright_green("ON") if config.use_webp else dim("OFF")
    log_detail(
        f"Format: {cyan(config.export_format)}, Draco: {draco_str}, WebP: {webp_str}"
    )

    if config.use_draco:
        log_detail(f"{dim('Attempting export with Draco compression...')}")
        with timed("glTF export (Draco)", print_on_exit=False) as t:
            result = _try_export(output_path_str, config, use_draco=True)
        if result:
            log_detail(
                f"{bright_green('Export successful')} {dim(f'({format_duration(t.elapsed)})')}"
            )
            return result

        # Draco crashed - fallback
        log_warn("Draco encoder crashed, retrying without compression...")
        with timed("glTF export (no Draco)", print_on_exit=False) as t:
            result = _try_export(output_path_str, config, use_draco=False)
        if result:
            log_ok(f"Exported without Draco {dim(f'({format_duration(t.elapsed)})')}")
            return result

        log_error("Export failed")
        return None

    # Draco disabled: single attempt without compression.
    with timed("glTF export", print_on_exit=False) as t:
        result = _try_export(output_path_str, config, use_draco=False)
    if result:
        log_detail(
            f"{bright_green('Export successful')} {dim(f'({format_duration(t.elapsed)})')}"
        )
    else:
        log_error("Export failed")
    return result
551
+
552
+
553
def optimize_and_export(
    output_path: Path | None = None,
    export_format: str = "GLB",
    use_draco: bool = True,
    use_webp: bool = True,
    max_texture_size: int = 1024,
    force_pot_textures: bool = False,
    analyze_animations: bool = True,
    check_bloat: bool = True,
    experimental_autofix: bool = False,
    quiet: bool = False,
    input_path: str | None = None,
) -> str | None:
    """
    Main optimization and export function.

    Args:
        output_path: Where to save the file (default: next to .blend file)
        export_format: 'GLB', 'GLTF_SEPARATE', or 'GLTF_EMBEDDED'
        use_draco: Enable Draco mesh compression
        use_webp: Export textures as WebP
        max_texture_size: Resize textures larger than this (0 = no resize)
        force_pot_textures: Force power-of-two texture dimensions
        analyze_animations: Analyze bones for static/animated (slow but worth it)
        check_bloat: Analyze meshes for unreasonable complexity
        experimental_autofix: Auto-decimate bloated props (EXPERIMENTAL)
        quiet: Suppress Blender's verbose output (show only warnings/errors)
        input_path: Path to GLB/glTF file to import (if None, uses current scene)

    Returns:
        Path of the exported file on success, or None if the export
        failed or the output file was missing afterwards.
    """
    # Bundle the keyword arguments into the config object shared by all
    # pipeline helpers.
    config = ExportConfig(
        output_path=output_path,
        export_format=export_format,
        use_draco=use_draco,
        use_webp=use_webp,
        max_texture_size=max_texture_size,
        force_pot_textures=force_pot_textures,
        analyze_animations=analyze_animations,
        check_bloat=check_bloat,
        experimental_autofix=experimental_autofix,
        quiet=quiet,
    )

    # 10 steps if no import, 11 if importing
    total_steps = 11 if input_path else 10
    step = StepTimer(total_steps=total_steps)

    # Header
    print_header("GLB EXPORT OPTIMIZER")

    # Import if path provided
    if input_path:
        _do_import_gltf(input_path, quiet=quiet, step=step)

    # Show before stats
    stats = get_scene_stats()
    verts_str = f"{stats['vertices']:,}"
    print(
        f"\n Scene: {cyan(str(stats['meshes']))} meshes, "
        f"{cyan(verts_str)} verts, "
        f"{cyan(str(stats['bones']))} bones, "
        f"{cyan(str(stats['actions']))} animations"
    )

    # Analysis and cleanup phases, then the export itself.
    _analyze_bloat(step, config)
    _check_duplicates(step, config)
    _check_skinned_meshes(step)
    _check_uv_maps(step, config)
    _clean_and_optimize(step, config)

    result_path = _export_file(step, config)

    # Finalize timing
    step.finish()

    # Report results based on success/failure
    if result_path and os.path.exists(result_path):
        step.final_message("Export complete!", success=True)
        size = os.path.getsize(result_path)
        print(f"\n{cyan('=' * 60)}")
        print(f" {bold('OUTPUT')}: {bright_green(os.path.basename(result_path))}")
        print(f" {bold('SIZE')}: {bright_cyan(format_bytes(size))} ({size:,} bytes)")
        print(
            f" {bold('TIME')}: {bright_cyan(format_duration(step.total_elapsed()))}"
        )
        print(f"{cyan('=' * 60)}")

        # Print timing summary
        step.print_summary()

        return result_path
    else:
        step.final_message("Export FAILED", success=False)
        if result_path:  # Path was determined but file missing?
            log_warn("Output file not found at expected path")
        return None
@@ -0,0 +1,20 @@
1
+ """Utility functions for Blender scene access and naming."""
2
+
3
+ from .blender import (
4
+ get_armature_data,
5
+ get_mesh_data,
6
+ get_scene,
7
+ get_scene_stats,
8
+ get_view_layer,
9
+ )
10
+ from .naming import nearest_power_of_two, sanitize_gltf_name
11
+
12
+ __all__ = [
13
+ "get_armature_data",
14
+ "get_mesh_data",
15
+ "get_scene",
16
+ "get_scene_stats",
17
+ "get_view_layer",
18
+ "nearest_power_of_two",
19
+ "sanitize_gltf_name",
20
+ ]