sdf_sampler-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,78 @@
+ # ABOUTME: sdf-sampler public API exports
+ # ABOUTME: Standalone SDF training data generation from point clouds
+
+ """
+ sdf-sampler: Auto-analysis and sampling of point clouds for SDF training.
+
+ Example usage:
+     from sdf_sampler import SDFAnalyzer, SDFSampler, load_point_cloud
+
+     # Load point cloud
+     xyz, normals = load_point_cloud("scan.ply")
+
+     # Auto-analyze to detect EMPTY/SOLID regions
+     analyzer = SDFAnalyzer()
+     result = analyzer.analyze(xyz=xyz, normals=normals)
+
+     # Generate training samples
+     sampler = SDFSampler()
+     samples = sampler.generate(
+         xyz=xyz,
+         normals=normals,
+         constraints=result.constraints,
+     )
+
+     # Export to parquet
+     sampler.export_parquet(samples, "training_data.parquet")
+ """
+
+ from sdf_sampler.analyzer import SDFAnalyzer
+ from sdf_sampler.config import AnalyzerConfig, SamplerConfig
+ from sdf_sampler.io import export_parquet, load_point_cloud
+ from sdf_sampler.models import (
+     AlgorithmType,
+     AnalysisResult,
+     BoxConstraint,
+     BrushStrokeConstraint,
+     Constraint,
+     HalfspaceConstraint,
+     PocketConstraint,
+     RayCarveConstraint,
+     SamplePointConstraint,
+     SamplingStrategy,
+     SeedPropagationConstraint,
+     SignConvention,
+     SphereConstraint,
+     TrainingSample,
+ )
+ from sdf_sampler.sampler import SDFSampler
+
+ __version__ = "0.1.0"
+
+ __all__ = [
+     # Main classes
+     "SDFAnalyzer",
+     "SDFSampler",
+     # Config
+     "AnalyzerConfig",
+     "SamplerConfig",
+     # I/O
+     "load_point_cloud",
+     "export_parquet",
+     # Models
+     "SignConvention",
+     "AlgorithmType",
+     "SamplingStrategy",
+     "AnalysisResult",
+     "TrainingSample",
+     # Constraints
+     "Constraint",
+     "BoxConstraint",
+     "SphereConstraint",
+     "HalfspaceConstraint",
+     "BrushStrokeConstraint",
+     "SeedPropagationConstraint",
+     "RayCarveConstraint",
+     "PocketConstraint",
+     "SamplePointConstraint",
+ ]
@@ -0,0 +1,19 @@
+ # ABOUTME: Algorithm module exports
+ # ABOUTME: Provides analysis algorithms for SDF region detection
+
+ from sdf_sampler.algorithms.flood_fill import flood_fill_empty_regions
+ from sdf_sampler.algorithms.normal_idw import generate_idw_normal_samples
+ from sdf_sampler.algorithms.normal_offset import generate_normal_offset_boxes
+ from sdf_sampler.algorithms.pocket import detect_pockets
+ from sdf_sampler.algorithms.voxel_grid import build_voxel_grid, compute_hull_mask
+ from sdf_sampler.algorithms.voxel_regions import generate_voxel_region_constraints
+
+ __all__ = [
+     "flood_fill_empty_regions",
+     "generate_voxel_region_constraints",
+     "generate_normal_offset_boxes",
+     "generate_idw_normal_samples",
+     "detect_pockets",
+     "build_voxel_grid",
+     "compute_hull_mask",
+ ]
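
The algorithms subpackage simply re-exports the individual entry points above, so they can also be driven directly instead of going through SDFAnalyzer. A minimal sketch of that direct use; it assumes AutoAnalysisOptions and AnalyzerConfig (whose defaults are not shown in this diff) can be constructed without arguments:

    # Hypothetical direct use of the re-exported algorithm functions.
    import numpy as np

    from sdf_sampler.algorithms import detect_pockets, flood_fill_empty_regions
    from sdf_sampler.config import AnalyzerConfig, AutoAnalysisOptions  # defaults assumed

    xyz = np.random.default_rng(0).uniform(-1.0, 1.0, size=(5000, 3))

    empty_constraints = flood_fill_empty_regions(xyz, normals=None, options=AutoAnalysisOptions())
    pocket_constraints = detect_pockets(xyz, config=AnalyzerConfig())
    print(len(empty_constraints), len(pocket_constraints))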
@@ -0,0 +1,233 @@
+ # ABOUTME: Flood fill algorithm for EMPTY region detection
+ # ABOUTME: Uses ray propagation from sky to identify exterior empty space
+
+ import numpy as np
+ from scipy.spatial import KDTree
+
+ from sdf_sampler.algorithms.voxel_grid import (
+     build_voxel_grid,
+     compute_hull_mask,
+     greedy_2d_mesh,
+     ray_propagation_with_bounces,
+ )
+ from sdf_sampler.config import AutoAnalysisOptions
+ from sdf_sampler.models.analysis import AlgorithmType, GeneratedConstraint
+ from sdf_sampler.models.constraints import SignConvention
+
+
+ def flood_fill_empty_regions(
+     xyz: np.ndarray,
+     normals: np.ndarray | None,
+     options: AutoAnalysisOptions,
+ ) -> list[GeneratedConstraint]:
+     """Generate EMPTY constraints using ray propagation with bouncing.
+
+     Uses the ray model:
+     1. EMPTY rays shine down from +Z (sky)
+     2. Rays bounce to fill occluded areas (trenches, overhangs)
+     3. Output depends on flood_fill_output option
+
+     Args:
+         xyz: Point cloud positions (N, 3)
+         normals: Point normals (N, 3) or None
+         options: Algorithm options
+
+     Returns:
+         List of GeneratedConstraint objects
+     """
+     constraints: list[GeneratedConstraint] = []
+
+     grid_result = build_voxel_grid(xyz, options.min_gap_size, options.max_grid_dim)
+     if grid_result is None:
+         return constraints
+
+     occupied, bbox_min, voxel_size, grid_shape = grid_result
+     _nx, _ny, nz = grid_shape
+
+     inside_hull = compute_hull_mask(xyz, bbox_min, voxel_size, grid_shape)
+
+     empty_mask, _ = ray_propagation_with_bounces(
+         occupied, grid_shape, inside_hull, options.cone_angle
+     )
+
+     output_mode = options.flood_fill_output.lower()
+
+     if output_mode in ("samples", "both"):
+         sample_constraints = _generate_samples_from_mask(
+             empty_mask,
+             bbox_min,
+             voxel_size,
+             xyz,
+             options.flood_fill_sample_count,
+             SignConvention.EMPTY,
+             AlgorithmType.FLOOD_FILL,
+         )
+         constraints.extend(sample_constraints)
+
+     if output_mode in ("boxes", "both"):
+         box_constraints = _generate_boxes_from_mask(
+             empty_mask,
+             bbox_min,
+             voxel_size,
+             nz,
+             options,
+             SignConvention.EMPTY,
+             AlgorithmType.FLOOD_FILL,
+         )
+         constraints.extend(box_constraints)
+
+     return constraints
+
+
+ def _generate_samples_from_mask(
+     mask: np.ndarray,
+     bbox_min: np.ndarray,
+     voxel_size: float,
+     xyz: np.ndarray,
+     n_samples: int,
+     sign: SignConvention,
+     algorithm: AlgorithmType,
+ ) -> list[GeneratedConstraint]:
+     """Generate sample_point constraints from a voxel mask.
+
+     Uses inverse-square distance weighting: more samples near the surface,
+     fewer samples far away.
+     """
+     constraints: list[GeneratedConstraint] = []
+
+     marked_indices = np.argwhere(mask)
+     if len(marked_indices) == 0:
+         return constraints
+
+     tree = KDTree(xyz)
+     rng = np.random.default_rng(42)
+
+     voxel_centers = bbox_min + (marked_indices + 0.5) * voxel_size
+     distances, _ = tree.query(voxel_centers, k=1)
+
+     epsilon = voxel_size * 0.1
+     weights = 1.0 / (distances + epsilon) ** 2
+     weights = weights / weights.sum()
+
+     sample_indices = rng.choice(len(marked_indices), size=n_samples, replace=True, p=weights)
+
+     for idx in sample_indices:
+         voxel_ijk = marked_indices[idx]
+         offset = rng.uniform(0, 1, 3)
+         world_pos = bbox_min + (voxel_ijk + offset) * voxel_size
+
+         dist, _ = tree.query(world_pos, k=1)
+         signed_dist = float(dist) if sign == SignConvention.EMPTY else -float(dist)
+
+         constraints.append(
+             GeneratedConstraint(
+                 constraint={
+                     "type": "sample_point",
+                     "sign": sign.value,
+                     "position": tuple(world_pos.tolist()),
+                     "distance": signed_dist,
+                 },
+                 algorithm=algorithm,
+                 confidence=0.8,
+                 description=f"Voxel sample at d={signed_dist:.3f}m",
+             )
+         )
+
+     return constraints
+
+
+ def _generate_boxes_from_mask(
+     empty_mask: np.ndarray,
+     bbox_min: np.ndarray,
+     voxel_size: float,
+     nz: int,
+     options: AutoAnalysisOptions,
+     sign: SignConvention,
+     algorithm: AlgorithmType,
+ ) -> list[GeneratedConstraint]:
+     """Generate axis-aligned box constraints from a voxel mask using greedy meshing."""
+     constraints: list[GeneratedConstraint] = []
+
+     all_boxes: list[tuple[int, int, int, int, int, int]] = []
+
+     for iz in range(nz):
+         slice_2d = empty_mask[:, :, iz]
+         if not slice_2d.any():
+             continue
+
+         rectangles = greedy_2d_mesh(slice_2d)
+         for x_min, x_max, y_min, y_max in rectangles:
+             all_boxes.append((iz, iz + 1, x_min, x_max, y_min, y_max))
+
+     if not all_boxes:
+         return constraints
+
+     all_boxes.sort(key=lambda b: (b[2], b[3], b[4], b[5], b[0]))
+
+     merged_boxes: list[tuple[int, int, int, int, int, int]] = []
+     current = all_boxes[0]
+
+     for box in all_boxes[1:]:
+         z_start, z_end, x_min, x_max, y_min, y_max = box
+         cz_start, cz_end, cx_min, cx_max, cy_min, cy_max = current
+
+         if (
+             x_min == cx_min
+             and x_max == cx_max
+             and y_min == cy_min
+             and y_max == cy_max
+             and z_start == cz_end
+         ):
+             current = (cz_start, z_end, cx_min, cx_max, cy_min, cy_max)
+         else:
+             merged_boxes.append(current)
+             current = box
+
+     merged_boxes.append(current)
+
+     min_extent = 3
+     merged_boxes = [
+         b
+         for b in merged_boxes
+         if (b[1] - b[0]) >= min_extent
+         and (b[3] - b[2]) >= min_extent
+         and (b[5] - b[4]) >= min_extent
+     ]
+
+     if not merged_boxes:
+         return constraints
+
+     max_boxes = options.max_boxes
+     if len(merged_boxes) > max_boxes:
+         merged_boxes.sort(
+             key=lambda b: (b[1] - b[0]) * (b[3] - b[2]) * (b[5] - b[4]),
+             reverse=True,
+         )
+         merged_boxes = merged_boxes[:max_boxes]
+
+     for z_start, z_end, x_min, x_max, y_min, y_max in merged_boxes:
+         world_min = bbox_min + np.array([x_min, y_min, z_start]) * voxel_size
+         world_max = bbox_min + np.array([x_max, y_max, z_end]) * voxel_size
+
+         center = (world_min + world_max) / 2
+         half_extents = (world_max - world_min) / 2
+
+         box_constraint = {
+             "type": "box",
+             "sign": sign.value,
+             "center": tuple(center.tolist()),
+             "half_extents": tuple(half_extents.tolist()),
+         }
+
+         volume = float(np.prod(half_extents * 2))
+         n_voxels = (z_end - z_start) * (x_max - x_min) * (y_max - y_min)
+         constraints.append(
+             GeneratedConstraint(
+                 constraint=box_constraint,
+                 algorithm=algorithm,
+                 confidence=0.85,
+                 description=f"Sky-reachable region ({n_voxels} voxels, {volume:.2f}m³)",
+             )
+         )
+
+     return constraints
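
The sample generation above biases voxel selection toward the surface with inverse-square distance weights, w = 1 / (d + eps)^2 normalized to sum to one. A standalone sketch of just that weighting step, using stand-in arrays rather than the package's voxel-grid helpers (which are not part of this diff):

    import numpy as np
    from scipy.spatial import KDTree

    rng = np.random.default_rng(42)
    surface = rng.uniform(0.0, 1.0, size=(1000, 3))       # stand-in point cloud
    candidates = rng.uniform(-0.5, 1.5, size=(5000, 3))   # stand-in empty-voxel centers

    d, _ = KDTree(surface).query(candidates, k=1)
    eps = 0.01                                            # plays the role of voxel_size * 0.1
    w = 1.0 / (d + eps) ** 2
    w /= w.sum()

    picks = rng.choice(len(candidates), size=500, replace=True, p=w)
    # Picked candidates cluster near the surface; the unweighted pool does not.
    print(np.median(d[picks]), np.median(d))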
@@ -0,0 +1,99 @@
+ # ABOUTME: IDW normal sampling algorithm
+ # ABOUTME: Generates sample constraints along normals with inverse distance weighting
+
+ import numpy as np
+
+ from sdf_sampler.algorithms.normal_offset import _farthest_point_sample
+ from sdf_sampler.config import AutoAnalysisOptions
+ from sdf_sampler.models.analysis import AlgorithmType, GeneratedConstraint
+
+
+ def generate_idw_normal_samples(
+     xyz: np.ndarray,
+     normals: np.ndarray | None,
+     options: AutoAnalysisOptions,
+ ) -> list[GeneratedConstraint]:
+     """Generate sample constraints along normals with inverse distance weighting.
+
+     Creates point samples at varying distances along surface normals, with
+     more samples concentrated near the surface (IDW = 1/distance^power).
+
+     Args:
+         xyz: Point cloud positions (N, 3)
+         normals: Point normals (N, 3) - required for this algorithm
+         options: Algorithm options
+
+     Returns:
+         List of GeneratedConstraint objects
+     """
+     constraints: list[GeneratedConstraint] = []
+
+     if normals is None or len(normals) != len(xyz):
+         return constraints
+
+     oriented_normals = _orient_normals_outward(xyz, normals)
+
+     n_surface_pts = min(options.idw_sample_count // 10, len(xyz))
+     if n_surface_pts < 1:
+         return constraints
+
+     surface_indices = _farthest_point_sample(xyz, n_surface_pts)
+     samples_per_point = options.idw_sample_count // len(surface_indices)
+
+     if samples_per_point < 1:
+         samples_per_point = 1
+
+     rng = np.random.default_rng(42)
+
+     for idx in surface_indices:
+         point = xyz[idx]
+         normal = oriented_normals[idx]
+         normal_norm = np.linalg.norm(normal)
+         if normal_norm < 0.1:
+             continue
+         normal = normal / normal_norm
+
+         # Generate distances with IDW distribution
+         u = rng.random(samples_per_point)
+         distances = options.idw_max_distance * (1 - u ** (1 / options.idw_power))
+
+         for dist in distances:
+             sign = rng.choice([-1, 1])
+             sample_pos = point + sign * dist * normal
+             sample_sign = "empty" if sign > 0 else "solid"
+
+             constraints.append(
+                 GeneratedConstraint(
+                     constraint={
+                         "type": "sample_point",
+                         "sign": sample_sign,
+                         "position": tuple(sample_pos.tolist()),
+                         "distance": float(sign * dist),
+                     },
+                     algorithm=AlgorithmType.NORMAL_IDW,
+                     confidence=0.8,
+                     description=f"IDW sample at d={sign * dist:.3f}m",
+                 )
+             )
+
+     return constraints
+
+
+ def _orient_normals_outward(xyz: np.ndarray, normals: np.ndarray) -> np.ndarray:
+     """Orient normals to point outward using a viewpoint heuristic.
+
+     Assumes the viewpoint is above the scene (outdoor scans).
+     """
+     centroid = xyz.mean(axis=0)
+     z_range = xyz[:, 2].max() - xyz[:, 2].min()
+     viewpoint = centroid.copy()
+     viewpoint[2] = xyz[:, 2].max() + z_range * 0.5
+
+     to_viewpoint = viewpoint - xyz
+     dot_products = np.sum(normals * to_viewpoint, axis=1)
+
+     oriented = normals.copy()
+     flip_mask = dot_products < 0
+     oriented[flip_mask] = -oriented[flip_mask]
+
+     return oriented
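
The distance law above, d = idw_max_distance * (1 - u ** (1 / idw_power)) with u uniform on [0, 1), is what concentrates samples near the surface for powers above 1. A quick numeric check of that transform; the parameter values are illustrative, not package defaults:

    import numpy as np

    rng = np.random.default_rng(0)
    max_distance, power = 2.0, 3.0          # illustrative values only
    u = rng.random(100_000)
    d = max_distance * (1 - u ** (1 / power))

    # P(d < max_distance / 2) = 1 - 0.5 ** power = 0.875 for power = 3.
    print(np.mean(d < 0.5 * max_distance))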
@@ -0,0 +1,111 @@
+ # ABOUTME: Normal offset algorithm for surface-relative constraints
+ # ABOUTME: Generates paired SOLID/EMPTY boxes offset along surface normals
+
+ import numpy as np
+ from scipy.spatial import KDTree
+
+ from sdf_sampler.algorithms.voxel_grid import estimate_mean_spacing
+ from sdf_sampler.config import AutoAnalysisOptions
+ from sdf_sampler.models.analysis import AlgorithmType, GeneratedConstraint
+ from sdf_sampler.models.constraints import SignConvention
+
+
+ def generate_normal_offset_boxes(
+     xyz: np.ndarray,
+     normals: np.ndarray | None,
+     options: AutoAnalysisOptions,
+ ) -> list[GeneratedConstraint]:
+     """Generate paired SOLID/EMPTY boxes offset along surface normals.
+
+     For a surface point with normal N:
+     - Box offset in +N direction (outward) -> EMPTY
+     - Box offset in -N direction (inward) -> SOLID
+
+     Args:
+         xyz: Point cloud positions (N, 3)
+         normals: Point normals (N, 3) - required for this algorithm
+         options: Algorithm options
+
+     Returns:
+         List of GeneratedConstraint objects
+     """
+     constraints: list[GeneratedConstraint] = []
+
+     if normals is None or len(normals) != len(xyz):
+         return constraints
+
+     tree = KDTree(xyz)
+     mean_spacing = estimate_mean_spacing(xyz, tree)
+
+     sample_indices = _farthest_point_sample(xyz, options.normal_offset_pairs)
+     offset_distance = mean_spacing * 3
+     box_size = mean_spacing * 2.5
+
+     for idx in sample_indices:
+         point = xyz[idx]
+         normal = normals[idx]
+         normal_norm = np.linalg.norm(normal)
+
+         if normal_norm < 0.1:
+             continue
+
+         normal = normal / normal_norm
+
+         # EMPTY box in +normal direction (outward)
+         empty_center = point + normal * offset_distance
+         box_constraint_empty = {
+             "type": "box",
+             "sign": SignConvention.EMPTY.value,
+             "center": tuple(empty_center.tolist()),
+             "half_extents": (box_size, box_size, box_size),
+         }
+
+         constraints.append(
+             GeneratedConstraint(
+                 constraint=box_constraint_empty,
+                 algorithm=AlgorithmType.NORMAL_OFFSET,
+                 confidence=0.75,
+                 description=f"Exterior offset from surface at ({point[0]:.2f}, {point[1]:.2f}, {point[2]:.2f})",
+             )
+         )
+
+         # SOLID box in -normal direction (inward)
+         solid_center = point - normal * offset_distance
+         box_constraint_solid = {
+             "type": "box",
+             "sign": SignConvention.SOLID.value,
+             "center": tuple(solid_center.tolist()),
+             "half_extents": (box_size, box_size, box_size),
+         }
+
+         constraints.append(
+             GeneratedConstraint(
+                 constraint=box_constraint_solid,
+                 algorithm=AlgorithmType.NORMAL_OFFSET,
+                 confidence=0.75,
+                 description=f"Interior offset from surface at ({point[0]:.2f}, {point[1]:.2f}, {point[2]:.2f})",
+             )
+         )
+
+     return constraints
+
+
+ def _farthest_point_sample(xyz: np.ndarray, n_samples: int) -> list[int]:
+     """Select well-distributed points using farthest point sampling."""
+     n_points = len(xyz)
+     if n_samples >= n_points:
+         return list(range(n_points))
+
+     rng = np.random.default_rng(42)
+     selected: list[int] = [int(rng.integers(n_points))]
+     min_distances = np.full(n_points, np.inf)
+
+     for _ in range(n_samples - 1):
+         last_selected = xyz[selected[-1]]
+         distances = np.linalg.norm(xyz - last_selected, axis=1)
+         min_distances = np.minimum(min_distances, distances)
+         min_distances[selected] = -1
+         next_idx = int(np.argmax(min_distances))
+         selected.append(next_idx)
+
+     return selected
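
A usage sketch for the pair generation on a synthetic flat patch: with +Z normals, the EMPTY boxes should land above the surface and the SOLID boxes below it. It assumes AutoAnalysisOptions (not shown in this diff) constructs with a usable default for normal_offset_pairs:

    import numpy as np

    from sdf_sampler.algorithms.normal_offset import generate_normal_offset_boxes
    from sdf_sampler.config import AutoAnalysisOptions  # defaults assumed
    from sdf_sampler.models.constraints import SignConvention

    # Flat patch in the z = 0 plane with +Z normals.
    g = np.linspace(0.0, 1.0, 50)
    xx, yy = np.meshgrid(g, g)
    xyz = np.column_stack([xx.ravel(), yy.ravel(), np.zeros(xx.size)])
    normals = np.tile([0.0, 0.0, 1.0], (len(xyz), 1))

    boxes = generate_normal_offset_boxes(xyz, normals, AutoAnalysisOptions())
    empty_z = [b.constraint["center"][2] for b in boxes
               if b.constraint["sign"] == SignConvention.EMPTY.value]
    solid_z = [b.constraint["center"][2] for b in boxes
               if b.constraint["sign"] == SignConvention.SOLID.value]
    print(min(empty_z) > 0.0 and max(solid_z) < 0.0)   # expected: True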
@@ -0,0 +1,146 @@
+ # ABOUTME: Pocket detection algorithm for cavity identification
+ # ABOUTME: Uses voxel flood fill to find disconnected interior cavities
+
+ from dataclasses import dataclass
+ from enum import IntEnum
+
+ import numpy as np
+ from scipy import ndimage
+
+ from sdf_sampler.config import AnalyzerConfig
+ from sdf_sampler.models.analysis import AlgorithmType, GeneratedConstraint
+ from sdf_sampler.models.constraints import SignConvention
+
+
+ class VoxelState(IntEnum):
+     """State of a voxel in the occupancy grid."""
+
+     EMPTY = 0
+     OCCUPIED = 1
+     OUTSIDE = 2
+
+
+ @dataclass
+ class PocketInfo:
+     """Information about a detected pocket."""
+
+     pocket_id: int
+     voxel_count: int
+     centroid: tuple[float, float, float]
+     bounds_low: tuple[float, float, float]
+     bounds_high: tuple[float, float, float]
+     volume_estimate: float
+     voxel_coords: np.ndarray  # World coordinates of voxel centers
+
+
+ def detect_pockets(
+     xyz: np.ndarray,
+     config: AnalyzerConfig,
+ ) -> list[GeneratedConstraint]:
+     """Detect pocket cavities in a point cloud using voxel analysis.
+
+     Pockets are disconnected interior voids that should be marked as SOLID.
+
+     Args:
+         xyz: Point cloud positions (N, 3)
+         config: Analyzer configuration
+
+     Returns:
+         List of GeneratedConstraint objects wrapping pocket constraints
+     """
+     constraints: list[GeneratedConstraint] = []
+
+     if len(xyz) < 10:
+         return constraints
+
+     # Compute bounds
+     bounds_low = xyz.min(axis=0)
+     bounds_high = xyz.max(axis=0)
+
+     # Compute voxel size
+     extent = bounds_high - bounds_low
+     longest_axis = np.max(extent)
+     voxel_size = longest_axis / config.pocket_voxel_target
+     voxel_size = max(voxel_size, config.pocket_min_voxel_size)
+
+     # Compute grid resolution - ensure at least 1 in each dimension
+     resolution = np.ceil(extent / voxel_size).astype(int)
+     resolution = np.maximum(resolution, 1)  # Avoid zero-size dimensions
+     resolution = np.minimum(resolution, config.pocket_max_voxels_per_axis)
+
+     # Build occupancy grid
+     grid = np.full(tuple(resolution), VoxelState.EMPTY, dtype=np.uint8)
+
+     voxel_indices = ((xyz - bounds_low) / voxel_size).astype(int)
+     voxel_indices = np.clip(voxel_indices, 0, resolution - 1)
+
+     # Mark occupied with dilation
+     if config.pocket_occupancy_dilation == 0:
+         grid[voxel_indices[:, 0], voxel_indices[:, 1], voxel_indices[:, 2]] = VoxelState.OCCUPIED
+     else:
+         occupied_mask = np.zeros_like(grid, dtype=bool)
+         occupied_mask[voxel_indices[:, 0], voxel_indices[:, 1], voxel_indices[:, 2]] = True
+         struct = ndimage.generate_binary_structure(3, 1)
+         dilated = ndimage.binary_dilation(
+             occupied_mask, structure=struct, iterations=config.pocket_occupancy_dilation
+         )
+         grid[dilated] = VoxelState.OCCUPIED
+
+     # Flood fill from boundary to mark outside
+     boundary_mask = np.zeros_like(grid, dtype=bool)
+     boundary_mask[0, :, :] = True
+     boundary_mask[-1, :, :] = True
+     boundary_mask[:, 0, :] = True
+     boundary_mask[:, -1, :] = True
+     boundary_mask[:, :, 0] = True
+     boundary_mask[:, :, -1] = True
+
+     seed = boundary_mask & (grid == VoxelState.EMPTY)
+     traversable = grid == VoxelState.EMPTY
+
+     struct = ndimage.generate_binary_structure(3, 1)
+     outside = ndimage.binary_dilation(seed, mask=traversable, iterations=-1, structure=struct)
+     grid[outside] = VoxelState.OUTSIDE
+
+     # Label remaining empty voxels as pockets
+     pocket_mask = grid == VoxelState.EMPTY
+     labeled, num_pockets = ndimage.label(pocket_mask, structure=struct)
+
+     # Extract pocket info
+     for pocket_id in range(1, num_pockets + 1):
+         voxel_count = int(np.sum(labeled == pocket_id))
+         if voxel_count < config.pocket_min_volume_voxels:
+             continue
+
+         mask = labeled == pocket_id
+         voxel_coords = np.argwhere(mask)
+         world_coords = voxel_coords * voxel_size + bounds_low + voxel_size / 2
+
+         centroid = tuple(world_coords.mean(axis=0).tolist())
+         pocket_bounds_low = tuple((voxel_coords.min(axis=0) * voxel_size + bounds_low).tolist())
+         pocket_bounds_high = tuple(
+             ((voxel_coords.max(axis=0) + 1) * voxel_size + bounds_low).tolist()
+         )
+         volume = voxel_count * (voxel_size**3)
+
+         pocket_constraint = {
+             "type": "pocket",
+             "sign": SignConvention.SOLID.value,
+             "pocket_id": pocket_id,
+             "voxel_count": voxel_count,
+             "centroid": centroid,
+             "bounds_low": pocket_bounds_low,
+             "bounds_high": pocket_bounds_high,
+             "volume_estimate": volume,
+         }
+
+         constraints.append(
+             GeneratedConstraint(
+                 constraint=pocket_constraint,
+                 algorithm=AlgorithmType.POCKET,
+                 confidence=0.95,
+                 description=f"Interior cavity at ({centroid[0]:.2f}, {centroid[1]:.2f}, {centroid[2]:.2f}), {voxel_count} voxels",
+             )
+         )
+
+     return constraints
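
The heart of the detection above is a boundary flood fill: EMPTY voxels reachable from the grid boundary become OUTSIDE, and whatever EMPTY space is left unreached is an enclosed cavity. The same idea on a toy occupancy grid (a sealed hollow cube), using the identical scipy.ndimage calls:

    import numpy as np
    from scipy import ndimage

    # Toy occupancy grid: a sealed hollow cube. The hollow interior should be
    # reported as one pocket; the open space around the cube should not.
    occupied = np.zeros((20, 20, 20), dtype=bool)
    occupied[5:15, 5:15, 5:15] = True        # solid block...
    occupied[6:14, 6:14, 6:14] = False       # ...hollowed out, walls one voxel thick

    empty = ~occupied
    boundary = np.zeros_like(empty)
    boundary[0, :, :] = boundary[-1, :, :] = True
    boundary[:, 0, :] = boundary[:, -1, :] = True
    boundary[:, :, 0] = boundary[:, :, -1] = True

    struct = ndimage.generate_binary_structure(3, 1)
    outside = ndimage.binary_dilation(boundary & empty, mask=empty,
                                      iterations=-1, structure=struct)

    pockets, n_pockets = ndimage.label(empty & ~outside, structure=struct)
    print(n_pockets, int((pockets == 1).sum()))   # expected: 1 pocket of 8**3 = 512 voxels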