notso-glb 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- notso_glb/__init__.py +38 -0
- notso_glb/__main__.py +6 -0
- notso_glb/analyzers/__init__.py +20 -0
- notso_glb/analyzers/bloat.py +117 -0
- notso_glb/analyzers/bones.py +100 -0
- notso_glb/analyzers/duplicates.py +71 -0
- notso_glb/analyzers/skinned_mesh.py +47 -0
- notso_glb/analyzers/uv_maps.py +59 -0
- notso_glb/cleaners/__init__.py +23 -0
- notso_glb/cleaners/bones.py +49 -0
- notso_glb/cleaners/duplicates.py +110 -0
- notso_glb/cleaners/mesh.py +183 -0
- notso_glb/cleaners/textures.py +116 -0
- notso_glb/cleaners/uv_maps.py +29 -0
- notso_glb/cleaners/vertex_groups.py +34 -0
- notso_glb/cli.py +330 -0
- notso_glb/exporters/__init__.py +8 -0
- notso_glb/exporters/gltf.py +647 -0
- notso_glb/utils/__init__.py +20 -0
- notso_glb/utils/blender.py +49 -0
- notso_glb/utils/constants.py +41 -0
- notso_glb/utils/gltfpack.py +273 -0
- notso_glb/utils/logging.py +421 -0
- notso_glb/utils/naming.py +24 -0
- notso_glb/wasm/__init__.py +32 -0
- notso_glb/wasm/constants.py +8 -0
- notso_glb/wasm/gltfpack.version +1 -0
- notso_glb/wasm/gltfpack.wasm +0 -0
- notso_glb/wasm/py.typed +0 -0
- notso_glb/wasm/runner.py +137 -0
- notso_glb/wasm/runtime.py +244 -0
- notso_glb/wasm/wasi.py +347 -0
- notso_glb-0.1.0.dist-info/METADATA +150 -0
- notso_glb-0.1.0.dist-info/RECORD +36 -0
- notso_glb-0.1.0.dist-info/WHEEL +4 -0
- notso_glb-0.1.0.dist-info/entry_points.txt +3 -0
notso_glb/__init__.py
ADDED
@@ -0,0 +1,38 @@
"""
GLB Export Optimizer for Mascot Models
======================================
Cleans up Blender files and exports optimized GLB for web delivery.

Optimizations:
- Detects bloated props (high-vert non-skinned meshes, repetitive geometry)
- Detects skinned meshes with non-root parents (glTF spec issue)
- Detects unused UV maps (TEXCOORD bloat)
- Detects duplicate names and sanitization collisions
- Removes unused vertex groups (bone weight bloat)
- Marks static bones as non-deform (animation bloat)
- Removes bone shape objects (Icosphere artifacts)
- Resizes textures to max 1024px (optional POT enforcement)
- Exports with Draco mesh compression
- Exports with WebP textures

Usage:
    CLI:
        notso-glb model.glb -o output.glb
        notso-glb model.blend --format gltf-embedded
        notso-glb model.gltf --no-draco --max-texture 2048

    Python:
        from notso_glb import main
        main()
"""

from importlib.metadata import PackageNotFoundError, version

from notso_glb.cli import main

try:
    __version__ = version("notso-glb")
except PackageNotFoundError:
    __version__ = "unknown"

__all__ = ["main"]
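The wheel also ships notso_glb/__main__.py (its six-line body is not reproduced in this diff), so the CLI shown above is presumably also reachable as a module:

    python -m notso_glb model.glb -o output.glb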
notso_glb/__main__.py
ADDED
notso_glb/analyzers/__init__.py
ADDED
@@ -0,0 +1,20 @@
"""Analyzers for mesh bloat, bones, duplicates, and UV maps."""

from notso_glb.analyzers.bloat import analyze_mesh_bloat, count_mesh_islands
from notso_glb.analyzers.bones import (
    analyze_bone_animation,
    get_bones_used_for_skinning,
)
from notso_glb.analyzers.duplicates import analyze_duplicate_names
from notso_glb.analyzers.skinned_mesh import analyze_skinned_mesh_parents
from notso_glb.analyzers.uv_maps import analyze_unused_uv_maps

__all__ = [
    "analyze_bone_animation",
    "analyze_duplicate_names",
    "analyze_mesh_bloat",
    "analyze_skinned_mesh_parents",
    "analyze_unused_uv_maps",
    "count_mesh_islands",
    "get_bones_used_for_skinning",
]
notso_glb/analyzers/bloat.py
ADDED
@@ -0,0 +1,117 @@
"""Mesh bloat analysis for detecting overly complex geometry."""

import bpy

from notso_glb.utils import get_mesh_data
from notso_glb.utils.constants import BLOAT_THRESHOLDS


def count_mesh_islands(obj) -> int:
    """Count disconnected mesh parts (islands) via stack-based flood fill."""
    import bmesh

    bm = bmesh.new()
    bm.from_mesh(obj.data)
    bm.verts.ensure_lookup_table()

    visited: set[int] = set()
    islands = 0

    for v in bm.verts:
        if v.index in visited:
            continue
        islands += 1
        stack = [v]
        while stack:
            current = stack.pop()
            if current.index in visited:
                continue
            visited.add(current.index)
            for edge in current.link_edges:
                other = edge.other_vert(current)
                if other.index not in visited:
                    stack.append(other)

    bm.free()
    return islands


def analyze_mesh_bloat() -> list[dict[str, object]]:
    """
    Detect unreasonably complex meshes for web delivery.

    Returns list of warnings with severity levels:
    - CRITICAL: Must fix before web deployment
    - WARNING: Should review, likely bloated
    - INFO: Notable but may be intentional
    """
    warnings: list[dict[str, object]] = []

    total_verts = 0
    for obj in bpy.data.objects:
        if obj.type != "MESH":
            continue

        mesh = get_mesh_data(obj)
        verts = len(mesh.vertices)
        total_verts += verts

        if verts < 100:
            continue

        # Check if skinned (character mesh vs prop)
        is_skinned = any(mod.type == "ARMATURE" for mod in obj.modifiers)

        # Count islands for non-skinned meshes (expensive operation)
        islands = 1
        if not is_skinned and verts < 20000:
            islands = count_mesh_islands(obj)

        verts_per_island = verts / max(islands, 1)

        # Bloat detection rules
        if not is_skinned:
            if verts > BLOAT_THRESHOLDS["prop_critical"]:
                warnings.append({
                    "severity": "CRITICAL",
                    "object": obj.name,
                    "issue": "BLOATED_PROP",
                    "detail": f"{verts:,} verts (limit: {BLOAT_THRESHOLDS['prop_critical']:,})",
                    "suggestion": "Decimate or replace with baked texture",
                })
            elif verts > BLOAT_THRESHOLDS["prop_warning"]:
                warnings.append({
                    "severity": "WARNING",
                    "object": obj.name,
                    "issue": "HIGH_VERT_PROP",
                    "detail": f"{verts:,} verts",
                    "suggestion": "Consider simplifying",
                })

            if (
                islands > BLOAT_THRESHOLDS["repetitive_islands"]
                and verts_per_island > BLOAT_THRESHOLDS["repetitive_verts"]
            ):
                warnings.append({
                    "severity": "CRITICAL",
                    "object": obj.name,
                    "issue": "REPETITIVE_DETAIL",
                    "detail": f"{islands} islands x {verts_per_island:.0f} verts each",
                    "suggestion": "Merge islands or use instancing/texture",
                })

    # Scene-level check
    if total_verts > BLOAT_THRESHOLDS["scene_total"]:
        warnings.append({
            "severity": "WARNING",
            "object": "SCENE",
            "issue": "HIGH_TOTAL_VERTS",
            "detail": f"{total_verts:,} verts (target: <{BLOAT_THRESHOLDS['scene_total']:,})",
            "suggestion": "Review all meshes for optimization opportunities",
        })

    # Sort by severity
    severity_order = {"CRITICAL": 0, "WARNING": 1, "INFO": 2}
    warnings.sort(key=lambda w: severity_order.get(str(w["severity"]), 99))

    return warnings
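count_mesh_islands is a plain flood fill over vertex adjacency. The same technique, self-contained and without bpy (names here are illustrative):

def count_islands(num_verts: int, edges: list[tuple[int, int]]) -> int:
    # Build adjacency, then flood-fill from each unvisited vertex;
    # every fill that starts fresh is one island.
    adjacency: dict[int, list[int]] = {v: [] for v in range(num_verts)}
    for a, b in edges:
        adjacency[a].append(b)
        adjacency[b].append(a)

    visited: set[int] = set()
    islands = 0
    for start in range(num_verts):
        if start in visited:
            continue
        islands += 1
        stack = [start]
        while stack:
            v = stack.pop()
            if v in visited:
                continue
            visited.add(v)
            stack.extend(n for n in adjacency[v] if n not in visited)
    return islands

# Two triangles sharing no vertices -> 2 islands.
assert count_islands(6, [(0, 1), (1, 2), (2, 0), (3, 4), (4, 5), (5, 3)]) == 2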
notso_glb/analyzers/bones.py
ADDED
@@ -0,0 +1,100 @@
"""Bone animation analysis for detecting static bones."""

import bpy
from bpy.types import Object

from notso_glb.utils import get_scene, get_view_layer
from notso_glb.utils.logging import log_debug


def get_bones_used_for_skinning() -> set[str]:
    """Find all bones that have vertex weights on skinned meshes."""
    used_bones: set[str] = set()

    for obj in bpy.data.objects:
        if obj.type != "MESH":
            continue

        # Check if mesh is skinned (has armature modifier)
        has_armature = any(mod.type == "ARMATURE" for mod in obj.modifiers)
        if not has_armature:
            continue

        # All vertex groups on skinned meshes are bone references
        for vg in obj.vertex_groups:
            used_bones.add(vg.name)

    return used_bones


def analyze_bone_animation() -> set[str]:
    """Find bones that never animate across all actions.

    Optimized to batch frame evaluations - evaluates all bones at once per
    frame instead of switching frames per-bone, reducing scene updates from
    O(bones * actions) to O(actions).
    """
    armature: Object | None = None
    for obj in bpy.data.objects:
        if obj.type == "ARMATURE":
            armature = obj
            break

    if not armature or not armature.animation_data or not armature.pose:
        log_debug("No armature with animation data found")
        return set()

    scene = get_scene()
    view_layer = get_view_layer()
    bone_movement: dict[str, float] = {b.name: 0.0 for b in armature.pose.bones}
    num_bones = len(armature.pose.bones)
    num_actions = len(bpy.data.actions)

    log_debug(f"Analyzing {num_bones} bones across {num_actions} actions")

    orig_action = armature.animation_data.action
    orig_frame = scene.frame_current

    for action in bpy.data.actions:
        armature.animation_data.action = action
        frame_start = int(action.frame_range[0])
        frame_end = int(action.frame_range[1])

        # Evaluate start frame ONCE for all bones
        scene.frame_set(frame_start)
        view_layer.update()
        start_poses: dict[str, tuple] = {}
        for bone in armature.pose.bones:
            start_poses[bone.name] = (
                bone.location.copy(),
                bone.rotation_quaternion.copy(),
                bone.rotation_euler.copy(),
                bone.rotation_mode,
            )

        # Evaluate end frame ONCE for all bones
        scene.frame_set(frame_end)
        view_layer.update()

        # Now calculate diffs without any frame switching
        for bone in armature.pose.bones:
            start_loc, start_rot_q, start_rot_e, rot_mode = start_poses[bone.name]
            end_loc = bone.location.copy()
            end_rot_q = bone.rotation_quaternion.copy()
            end_rot_e = bone.rotation_euler.copy()

            loc_diff = (end_loc - start_loc).length
            if rot_mode == "QUATERNION":
                rot_diff = (end_rot_q - start_rot_q).magnitude
            else:
                rot_diff = (
                    end_rot_e.to_quaternion() - start_rot_e.to_quaternion()
                ).magnitude

            bone_movement[bone.name] += loc_diff + rot_diff

    if orig_action:
        armature.animation_data.action = orig_action
    scene.frame_set(orig_frame)

    return {name for name, movement in bone_movement.items() if movement < 0.01}
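The two analyzers are designed to be combined downstream: a bone is only safe to strip when it is both static and carries no skin weights, which is exactly the guard the cleaner in notso_glb.cleaners.bones applies. A short usage sketch:

from notso_glb.analyzers.bones import (
    analyze_bone_animation,
    get_bones_used_for_skinning,
)

# Bones that never move across any action...
static = analyze_bone_animation()
# ...minus bones that carry vertex weights, since demoting those
# would break skinning.
safe_to_strip = static - get_bones_used_for_skinning()
print(f"{len(safe_to_strip)} bones are static and unweighted")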
notso_glb/analyzers/duplicates.py
ADDED
@@ -0,0 +1,71 @@
"""Duplicate name detection for glTF export issues."""

from collections import Counter, defaultdict

import bpy

from notso_glb.utils import sanitize_gltf_name


def analyze_duplicate_names() -> list[dict[str, object]]:
    """
    Detect duplicate names that will cause glTF export issues.

    Checks both:
    1. Exact duplicates in Blender
    2. Names that collide after glTF sanitization (e.g., 'Cube.155' and 'Cube_155')

    Returns list of duplicates grouped by type.
    """
    duplicates: list[dict[str, object]] = []

    def check_collection(items, type_name: str) -> None:
        """Check a collection for exact and sanitized duplicates."""
        names = [item.name for item in items]

        # 1. Exact duplicates
        for name, count in Counter(names).items():
            if count > 1:
                duplicates.append({
                    "type": type_name,
                    "name": name,
                    "count": count,
                    "issue": "EXACT_DUPLICATE",
                })

        # 2. Sanitization collisions
        sanitized_map: dict[str, list[str]] = defaultdict(list)
        for name in names:
            sanitized = sanitize_gltf_name(name)
            sanitized_map[sanitized].append(name)

        for sanitized, originals in sanitized_map.items():
            if len(originals) > 1:
                unique_originals = set(originals)
                if len(unique_originals) > 1:
                    duplicates.append({
                        "type": type_name,
                        "name": f"{sanitized} <- {list(unique_originals)}",
                        "count": len(originals),
                        "issue": "SANITIZATION_COLLISION",
                    })

    # Check all relevant collections
    check_collection(bpy.data.objects, "OBJECT")
    check_collection(bpy.data.meshes, "MESH")
    check_collection(bpy.data.materials, "MATERIAL")
    check_collection(bpy.data.actions, "ACTION")

    # Special handling for bones (per armature)
    for arm in bpy.data.armatures:
        bone_names = [bone.name for bone in arm.bones]
        for name, count in Counter(bone_names).items():
            if count > 1:
                duplicates.append({
                    "type": "BONE",
                    "name": f"{arm.name}/{name}",
                    "count": count,
                    "issue": "EXACT_DUPLICATE",
                })

    return duplicates
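The collision check hinges on sanitize_gltf_name, which lives in notso_glb.utils.naming and is not shown in this diff. Judging by the 'Cube.155' / 'Cube_155' example above and the cleaner's re.sub later in this package, it maps glTF-unsafe characters such as '.' to '_'. A toy stand-in to illustrate the bucketing logic (illustrative only, not the package's actual sanitizer):

import re
from collections import defaultdict

def toy_sanitize(name: str) -> str:
    # Stand-in: replace characters commonly rejected by glTF tooling.
    return re.sub(r"[^A-Za-z0-9_]", "_", name)

names = ["Cube.155", "Cube_155", "Lamp"]
buckets: dict[str, list[str]] = defaultdict(list)
for n in names:
    buckets[toy_sanitize(n)].append(n)

for sanitized, originals in buckets.items():
    if len(set(originals)) > 1:
        print(f"collision: {sanitized} <- {originals}")
# -> collision: Cube_155 <- ['Cube.155', 'Cube_155']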
notso_glb/analyzers/skinned_mesh.py
ADDED
@@ -0,0 +1,47 @@
"""Skinned mesh parent hierarchy analysis."""

import bpy


def analyze_skinned_mesh_parents() -> list[dict[str, object]]:
    """
    Detect skinned meshes that are not at scene root.

    glTF spec: parent transforms don't affect skinned meshes, so non-root
    skinned meshes can have unexpected positioning.

    Returns list of warnings for each non-root skinned mesh.
    """
    warnings: list[dict[str, object]] = []

    for obj in bpy.data.objects:
        if obj.type != "MESH":
            continue

        # Check if mesh is skinned (has armature modifier)
        has_armature = any(mod.type == "ARMATURE" for mod in obj.modifiers)
        if not has_armature:
            continue

        # Check if it has a parent (not at root)
        parent = obj.parent
        if parent is not None:
            # Check if parent has non-identity transform
            has_transform = (
                parent.location.length > 0.0001
                or parent.rotation_euler.x != 0
                or parent.rotation_euler.y != 0
                or parent.rotation_euler.z != 0
                or abs(parent.scale.x - 1) > 0.0001
                or abs(parent.scale.y - 1) > 0.0001
                or abs(parent.scale.z - 1) > 0.0001
            )

            warnings.append({
                "mesh": obj.name,
                "parent": parent.name,
                "has_transform": has_transform,
                "severity": "CRITICAL" if has_transform else "WARNING",
            })

    return warnings
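This analyzer only flags the issue. A common manual remedy in Blender is to re-parent the skinned mesh to the scene root while keeping its world transform; a sketch using the standard bpy operator (the object name is hypothetical, and this fix is not part of the package):

import bpy

obj = bpy.data.objects["Body"]  # hypothetical skinned mesh name
bpy.context.view_layer.objects.active = obj
obj.select_set(True)  # parent_clear operates on selected objects
# Drop the parent but bake its transform into the mesh, so nothing moves.
bpy.ops.object.parent_clear(type="CLEAR_KEEP_TRANSFORM")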
notso_glb/analyzers/uv_maps.py
ADDED
@@ -0,0 +1,59 @@
"""Unused UV map detection."""

from typing import cast

import bpy
from bpy.types import ShaderNodeUVMap

from notso_glb.utils import get_mesh_data


def analyze_unused_uv_maps() -> list[dict[str, object]]:
    """
    Detect UV maps that aren't used by any material.

    Unused TEXCOORD attributes bloat the glTF file.
    Returns list of meshes with unused UV maps.
    """
    warnings: list[dict[str, object]] = []

    for obj in bpy.data.objects:
        if obj.type != "MESH":
            continue

        mesh = get_mesh_data(obj)
        if not mesh.uv_layers:
            continue

        # Get UV maps used by materials
        used_uvs: set[str] = set()
        for mat_slot in obj.material_slots:
            if not mat_slot.material or not mat_slot.material.use_nodes:
                continue
            node_tree = mat_slot.material.node_tree
            if node_tree is None:
                continue
            for node in node_tree.nodes:
                if node.type == "UVMAP":
                    uv_node = cast(ShaderNodeUVMap, node)
                    if uv_node.uv_map:
                        used_uvs.add(uv_node.uv_map)
                # Image textures default to first UV if no explicit UV node
                if node.type == "TEX_IMAGE":
                    if not used_uvs and mesh.uv_layers:
                        used_uvs.add(mesh.uv_layers[0].name)

        # If no explicit UV usage found, assume first UV is used
        if not used_uvs and mesh.uv_layers:
            used_uvs.add(mesh.uv_layers[0].name)

        # Find unused UV maps
        unused = [uv.name for uv in mesh.uv_layers if uv.name not in used_uvs]
        if unused:
            warnings.append({
                "mesh": obj.name,
                "unused_uvs": unused,
                "total_uvs": len(mesh.uv_layers),
            })

    return warnings
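The matching cleaner, notso_glb.cleaners.uv_maps.remove_unused_uv_maps, is listed in this wheel but its body is not shown above. The bpy primitive such a cleaner needs is Mesh.uv_layers.remove; an illustrative stand-in (not the package's actual implementation):

import bpy

def drop_unused_uvs(obj: bpy.types.Object, unused: list[str]) -> int:
    # Hypothetical helper: removes the named UV layers from a mesh datablock,
    # e.g. driven by the "unused_uvs" field of the warnings above.
    mesh = obj.data
    removed = 0
    for name in unused:
        layer = mesh.uv_layers.get(name)
        if layer is not None:
            mesh.uv_layers.remove(layer)
            removed += 1
    return removed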
notso_glb/cleaners/__init__.py
ADDED
@@ -0,0 +1,23 @@
"""Cleaners for mesh, bones, textures, and vertex groups."""

from notso_glb.cleaners.bones import (
    delete_bone_shape_objects,
    mark_static_bones_non_deform,
)
from notso_glb.cleaners.duplicates import auto_fix_duplicate_names
from notso_glb.cleaners.mesh import auto_fix_bloat, cleanup_mesh_bmesh, decimate_mesh
from notso_glb.cleaners.textures import resize_textures
from notso_glb.cleaners.uv_maps import remove_unused_uv_maps
from notso_glb.cleaners.vertex_groups import clean_vertex_groups

__all__ = [
    "auto_fix_bloat",
    "auto_fix_duplicate_names",
    "clean_vertex_groups",
    "cleanup_mesh_bmesh",
    "decimate_mesh",
    "delete_bone_shape_objects",
    "mark_static_bones_non_deform",
    "remove_unused_uv_maps",
    "resize_textures",
]
notso_glb/cleaners/bones.py
ADDED
@@ -0,0 +1,49 @@
"""Bone cleanup functions."""

import bpy
from bpy.types import Object

from notso_glb.analyzers.bones import get_bones_used_for_skinning
from notso_glb.utils import get_armature_data


def mark_static_bones_non_deform(static_bones: set[str]) -> tuple[int, int]:
    """Mark static bones as non-deform, but KEEP bones used for skinning."""
    armature: Object | None = None
    for obj in bpy.data.objects:
        if obj.type == "ARMATURE":
            armature = obj
            break

    if not armature:
        return 0, 0

    # CRITICAL: Don't mark bones as non-deform if they're weighted to meshes
    skinning_bones = get_bones_used_for_skinning()
    safe_to_mark = static_bones - skinning_bones
    skipped = len(static_bones & skinning_bones)

    arm_data = get_armature_data(armature)
    marked = 0
    for bone_name in safe_to_mark:
        bone = arm_data.bones.get(bone_name)
        if not bone or not bone.use_deform:
            continue
        bone.use_deform = False
        marked += 1

    return marked, skipped


def delete_bone_shape_objects() -> int:
    """Remove objects used as bone custom shapes (Icosphere, etc.)."""
    deleted = 0
    shape_names = ["icosphere", "bone_shape", "widget", "wgt_"]

    for obj in list(bpy.data.objects):
        name_lower = obj.name.lower()
        if any(s in name_lower for s in shape_names):
            bpy.data.objects.remove(obj, do_unlink=True)
            deleted += 1

    return deleted
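Note that delete_bone_shape_objects matches purely on name substrings: any object whose lowercased name contains "icosphere", "bone_shape", "widget", or "wgt_" is removed, including props that merely happen to use those words.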
notso_glb/cleaners/duplicates.py
ADDED
@@ -0,0 +1,110 @@
"""Duplicate name auto-fix functions."""

from __future__ import annotations

import re
from typing import Any

import bpy

# Type alias for rename result
RenameRecord = dict[str, str]


def _get_collection(dtype: str) -> Any | None:
    """Get the appropriate bpy.data collection for a data type."""
    collections = {
        "OBJECT": bpy.data.objects,
        "MESH": bpy.data.meshes,
        "MATERIAL": bpy.data.materials,
        "ACTION": bpy.data.actions,
    }
    return collections.get(dtype)


def _get_ptr_suffix(item: Any) -> str:
    """Get short unique suffix from memory pointer (last 4 hex digits)."""
    return format(item.as_pointer() & 0xFFFF, "04x")


def _parse_colliding_names(name_field: str) -> list[str]:
    """Parse collision info string to extract list of colliding names."""
    match = re.search(r"\[([^\]]+)\]", name_field)
    if not match:
        return []
    names_str = match.group(1)
    return [n.strip().strip("'\"") for n in names_str.split(",")]


def _fix_exact_duplicates(
    collection: Any,
    name: str,
    dtype: str,
    processed_ids: set[int],
    renames: list[RenameRecord],
) -> None:
    """Rename exact duplicates by appending pointer suffix."""
    matching = [item for item in collection if item.name == name]
    for item in matching[1:]:
        if id(item) in processed_ids:
            continue
        old_name = item.name
        new_name = f"{name}_{_get_ptr_suffix(item)}"
        item.name = new_name
        processed_ids.add(id(item))
        renames.append({"type": dtype, "old": old_name, "new": new_name})


def _fix_sanitization_collision(
    collection: Any,
    name_field: str,
    dtype: str,
    processed_ids: set[int],
    renames: list[RenameRecord],
) -> None:
    """Rename colliding names from sanitization by appending pointer suffix."""
    colliding_names = _parse_colliding_names(name_field)
    for name in colliding_names[1:]:
        item = collection.get(name)
        if not item or id(item) in processed_ids:
            continue
        base = re.sub(r"\.", "_", name)
        new_name = f"{base}_{_get_ptr_suffix(item)}"
        item.name = new_name
        processed_ids.add(id(item))
        renames.append({"type": dtype, "old": name, "new": new_name})


def auto_fix_duplicate_names(
    duplicates: list[dict[str, object]],
) -> list[RenameRecord]:
    """
    Automatically rename duplicates by appending memory pointer suffix.

    Uses as_pointer() for deterministic, session-stable unique IDs.
    Handles both exact duplicates and sanitization collisions.
    Returns list of renames performed.
    """
    renames: list[RenameRecord] = []
    processed_ids: set[int] = set()

    for dup in duplicates:
        dtype = str(dup["type"])
        if dtype == "BONE":
            continue

        collection = _get_collection(dtype)
        if not collection:
            continue

        issue = dup.get("issue", "EXACT_DUPLICATE")
        name_field = str(dup["name"])

        if issue == "EXACT_DUPLICATE":
            _fix_exact_duplicates(collection, name_field, dtype, processed_ids, renames)
        elif issue == "SANITIZATION_COLLISION":
            _fix_sanitization_collision(
                collection, name_field, dtype, processed_ids, renames
            )

    return renames