sdf_sampler-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,299 @@
+ # ABOUTME: SDFAnalyzer class for auto-analysis of point clouds
+ # ABOUTME: Detects SOLID and EMPTY regions using multiple algorithms
+
+ import uuid
+ from datetime import UTC, datetime
+
+ import numpy as np
+
+ from sdf_sampler.algorithms.flood_fill import flood_fill_empty_regions
+ from sdf_sampler.algorithms.normal_idw import generate_idw_normal_samples
+ from sdf_sampler.algorithms.normal_offset import generate_normal_offset_boxes
+ from sdf_sampler.algorithms.pocket import detect_pockets
+ from sdf_sampler.algorithms.voxel_regions import generate_voxel_region_constraints
+ from sdf_sampler.config import AnalyzerConfig, AutoAnalysisOptions
+ from sdf_sampler.models.analysis import (
+     ALL_ALGORITHMS,
+     AlgorithmStats,
+     AlgorithmType,
+     AnalysisResult,
+     AnalysisSummary,
+     GeneratedConstraint,
+ )
+ from sdf_sampler.models.constraints import SignConvention
+
+
+ class SDFAnalyzer:
+     """Automatic SDF region detection from point clouds.
+
+     Generates spatial constraints (boxes, samples) that define SOLID (inside)
+     and EMPTY (outside) regions for SDF training data generation.
+
+     Example:
+         >>> analyzer = SDFAnalyzer()
+         >>> result = analyzer.analyze(xyz=points, normals=normals)
+         >>> print(f"Generated {len(result.generated_constraints)} constraints")
+
+     Algorithms:
+         - flood_fill: EMPTY regions reachable from sky
+         - voxel_regions: SOLID underground regions
+         - normal_offset: Paired boxes along surface normals
+         - normal_idw: Point samples with inverse distance weighting
+         - pocket: Interior cavity detection
+     """
+
+     def __init__(self, config: AnalyzerConfig | None = None):
+         """Initialize the analyzer.
+
+         Args:
+             config: Optional configuration. Uses defaults if not provided.
+         """
+         self.config = config or AnalyzerConfig()
+
+     def analyze(
+         self,
+         xyz: np.ndarray,
+         normals: np.ndarray | None = None,
+         algorithms: list[str] | None = None,
+         options: AutoAnalysisOptions | None = None,
+     ) -> AnalysisResult:
+         """Run analysis algorithms and generate constraints.
+
+         This is the main entry point for auto-analysis. It runs the specified
+         algorithms (or all by default) and returns constraints that can be
+         used for sample generation.
+
+         Args:
+             xyz: Point cloud positions (N, 3) as numpy array
+             normals: Point normals (N, 3) or None if not available
+             algorithms: List of algorithm names to run (default: all)
+             options: Fine-grained algorithm options (default: from config)
+
+         Returns:
+             AnalysisResult containing generated constraints and statistics
+
+         Example:
+             >>> result = analyzer.analyze(
+             ...     xyz=points,
+             ...     normals=normals,
+             ...     algorithms=["flood_fill", "voxel_regions"],
+             ... )
+         """
+         if options is None:
+             options = AutoAnalysisOptions.from_analyzer_config(self.config)
+
+         # Validate input
+         xyz = np.asarray(xyz)
+         if xyz.ndim != 2 or xyz.shape[1] != 3:
+             raise ValueError(f"xyz must be (N, 3), got {xyz.shape}")
+
+         if normals is not None:
+             normals = np.asarray(normals)
+             if normals.shape != xyz.shape:
+                 raise ValueError(f"normals shape {normals.shape} doesn't match xyz {xyz.shape}")
+
+         # Determine which algorithms to run
+         algo_list = algorithms if algorithms else [a.value for a in ALL_ALGORITHMS]
+         algo_list = [a for a in algo_list if a in [alg.value for alg in ALL_ALGORITHMS]]
+
+         # Run algorithms and collect constraints
+         all_constraints: list[GeneratedConstraint] = []
+         algorithm_stats: dict[str, AlgorithmStats] = {}
+         algorithms_run: list[str] = []
+
+         for algo_name in algo_list:
+             constraints = self._run_algorithm(algo_name, xyz, normals, options)
+             if constraints:
+                 all_constraints.extend(constraints)
+                 algorithms_run.append(algo_name)
+                 algorithm_stats[algo_name] = AlgorithmStats(
+                     constraints_generated=len(constraints),
+                     coverage_description=self._get_algorithm_description(algo_name, len(constraints)),
+                 )
+
+         # Remove redundant contained boxes
+         all_constraints = self._simplify_constraints(all_constraints, options.overlap_threshold)
+
+         # Filter out constraints outside the X-Y alpha shape
+         if options.hull_filter_enabled:
+             all_constraints = self._filter_outside_hull(all_constraints, xyz, options.hull_alpha)
+
+         # Compute summary
+         summary = self._compute_summary(all_constraints, len(algorithm_stats))
+
+         return AnalysisResult(
+             analysis_id=str(uuid.uuid4()),
+             computed_at=datetime.now(UTC),
+             algorithms_run=algorithms_run,
+             summary=summary,
+             algorithm_stats=algorithm_stats,
+             generated_constraints=all_constraints,
+         )
+
+     async def analyze_async(
+         self,
+         xyz: np.ndarray,
+         normals: np.ndarray | None = None,
+         algorithms: list[str] | None = None,
+         options: AutoAnalysisOptions | None = None,
+     ) -> AnalysisResult:
+         """Async variant of analyze() for heavy workloads.
+
+         Same interface as analyze() but can be awaited.
+         """
+         # For now, just wrap sync - could be made truly async with executors
+         return self.analyze(xyz, normals, algorithms, options)
+
+     def _run_algorithm(
+         self,
+         name: str,
+         xyz: np.ndarray,
+         normals: np.ndarray | None,
+         options: AutoAnalysisOptions,
+     ) -> list[GeneratedConstraint]:
+         """Run a single analysis algorithm."""
+         if name == AlgorithmType.POCKET.value:
+             return detect_pockets(xyz, self.config)
+         elif name == AlgorithmType.NORMAL_OFFSET.value:
+             return generate_normal_offset_boxes(xyz, normals, options)
+         elif name == AlgorithmType.FLOOD_FILL.value:
+             return flood_fill_empty_regions(xyz, normals, options)
+         elif name == AlgorithmType.VOXEL_REGIONS.value:
+             return generate_voxel_region_constraints(xyz, normals, options)
+         elif name == AlgorithmType.NORMAL_IDW.value:
+             return generate_idw_normal_samples(xyz, normals, options)
+         return []
+
+     def _get_algorithm_description(self, algo_name: str, count: int) -> str:
+         """Get human-readable description for algorithm results."""
+         descriptions = {
+             AlgorithmType.POCKET.value: f"Detected {count} interior cavities",
+             AlgorithmType.NORMAL_OFFSET.value: f"Generated {count} surface offset constraints",
+             AlgorithmType.FLOOD_FILL.value: f"Found {count} sky-reachable exterior regions",
+             AlgorithmType.VOXEL_REGIONS.value: f"Found {count} underground solid regions",
+             AlgorithmType.NORMAL_IDW.value: f"Generated {count} IDW normal samples",
+         }
+         return descriptions.get(algo_name, f"Generated {count} constraints")
+
+     def _compute_summary(
+         self, constraints: list[GeneratedConstraint], algorithms_contributing: int
+     ) -> AnalysisSummary:
+         """Compute summary statistics from generated constraints."""
+         solid_count = sum(
+             1 for c in constraints if c.constraint.get("sign") == SignConvention.SOLID.value
+         )
+         empty_count = sum(
+             1 for c in constraints if c.constraint.get("sign") == SignConvention.EMPTY.value
+         )
+
+         return AnalysisSummary(
+             total_constraints=len(constraints),
+             solid_constraints=solid_count,
+             empty_constraints=empty_count,
+             algorithms_contributing=algorithms_contributing,
+         )
+
+     def _box_intersection_fraction(self, box_a: dict, box_b: dict) -> float:
+         """Calculate what fraction of box_b's volume intersects with box_a."""
+         a_center = np.array(box_a["center"])
+         a_half = np.array(box_a["half_extents"])
+         b_center = np.array(box_b["center"])
+         b_half = np.array(box_b["half_extents"])
+
+         a_min, a_max = a_center - a_half, a_center + a_half
+         b_min, b_max = b_center - b_half, b_center + b_half
+
+         inter_min = np.maximum(a_min, b_min)
+         inter_max = np.minimum(a_max, b_max)
+
+         inter_dims = np.maximum(0, inter_max - inter_min)
+         intersection_volume = float(np.prod(inter_dims))
+
+         b_dims = b_max - b_min
+         b_volume = float(np.prod(b_dims))
+
+         if b_volume <= 0:
+             return 0.0
+
+         return intersection_volume / b_volume
+
+     def _simplify_constraints(
+         self, constraints: list[GeneratedConstraint], overlap_threshold: float = 0.5
+     ) -> list[GeneratedConstraint]:
+         """Remove boxes that significantly overlap with larger boxes."""
+         boxes: list[tuple[int, GeneratedConstraint, float]] = []
+         for i, c in enumerate(constraints):
+             if c.constraint.get("type") == "box":
+                 half = np.array(c.constraint["half_extents"])
+                 volume = float(np.prod(half * 2))
+                 boxes.append((i, c, volume))
+
+         remove_indices: set[int] = set()
+         for i, (_idx_a, box_a, vol_a) in enumerate(boxes):
+             for j, (idx_b, box_b, vol_b) in enumerate(boxes):
+                 if i == j:
+                     continue
+                 if vol_b < vol_a:
+                     fraction = self._box_intersection_fraction(box_a.constraint, box_b.constraint)
+                     if fraction > overlap_threshold:
+                         remove_indices.add(idx_b)
+
+         return [c for i, c in enumerate(constraints) if i not in remove_indices]
+
+     def _filter_outside_hull(
+         self, constraints: list[GeneratedConstraint], xyz: np.ndarray, alpha: float
+     ) -> list[GeneratedConstraint]:
+         """Filter out constraints whose center falls outside the X-Y alpha shape."""
+         if len(constraints) == 0 or len(xyz) < 3:
+             return constraints
+
+         xy = xyz[:, :2]
+
+         try:
+             import alphashape
+             from shapely.geometry import Point
+
+             shape = alphashape.alphashape(xy, alpha)
+             if shape is None or shape.is_empty:
+                 return constraints
+         except Exception:
+             return constraints
+
+         filtered: list[GeneratedConstraint] = []
+         for constraint in constraints:
+             c = constraint.constraint
+             c_type = c.get("type")
+
+             center_xy = self._get_constraint_center_xy(c, c_type)
+             if center_xy is None:
+                 filtered.append(constraint)
+                 continue
+
+             point = Point(center_xy[0], center_xy[1])
+             if shape.contains(point) or shape.touches(point):
+                 filtered.append(constraint)
+
+         return filtered
+
+     def _get_constraint_center_xy(self, constraint: dict, c_type: str | None) -> np.ndarray | None:
+         """Get the X-Y center of a constraint for hull checking."""
+         center = None
+
+         if c_type == "box":
+             center = constraint.get("center")
+         elif c_type == "sample_point":
+             center = constraint.get("position")
+         elif c_type == "sphere":
+             center = constraint.get("center")
+         elif c_type == "pocket":
+             center = constraint.get("centroid")
+         else:
+             for field in ["center", "position", "point", "centroid"]:
+                 if field in constraint:
+                     center = constraint[field]
+                     break
+
+         if center is None:
+             return None
+
+         return np.array(center[:2])
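
A minimal usage sketch of the analyzer above, not part of the package: the module path `sdf_sampler.analyzer` is an assumption (this diff omits the first file's name), and the synthetic point cloud and all parameter values are illustrative.

```python
# Usage sketch. Module path sdf_sampler.analyzer is assumed from the
# class name; the diff does not name this file. Data is synthetic.
import numpy as np

from sdf_sampler.analyzer import SDFAnalyzer
from sdf_sampler.config import AnalyzerConfig

# 1000 points on the unit sphere, with outward-pointing normals.
rng = np.random.default_rng(0)
normals = rng.normal(size=(1_000, 3))
normals /= np.linalg.norm(normals, axis=1, keepdims=True)
xyz = normals.copy()

analyzer = SDFAnalyzer(AnalyzerConfig(max_boxes=10, hull_filter_enabled=False))
result = analyzer.analyze(
    xyz=xyz,
    normals=normals,
    algorithms=["flood_fill", "voxel_regions"],  # unknown names are dropped silently
)

print(f"{len(result.generated_constraints)} constraints from {result.algorithms_run}")
for name, stats in result.algorithm_stats.items():
    print(f"  {name}: {stats.coverage_description}")
```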
sdf_sampler/config.py ADDED
@@ -0,0 +1,171 @@
+ # ABOUTME: Configuration dataclasses for analyzer and sampler
+ # ABOUTME: Simple config objects replacing FastAPI Settings dependency
+
+ from dataclasses import dataclass
+
+
+ @dataclass
+ class AnalyzerConfig:
+     """Configuration for SDFAnalyzer.
+
+     Controls voxel grid parameters, ray propagation, and filtering options.
+     """
+
+     # Voxel grid parameters
+     min_gap_size: float = 0.10
+     """Minimum gap size in meters that flood fill can traverse."""
+
+     max_grid_dim: int = 200
+     """Maximum voxel grid dimension (caps at max_grid_dim³ voxels)."""
+
+     # Ray propagation
+     cone_angle: float = 15.0
+     """Ray propagation cone half-angle in degrees."""
+
+     # Normal offset algorithm
+     normal_offset_pairs: int = 40
+     """Number of SOLID/EMPTY box pairs for normal_offset algorithm."""
+
+     # Filtering
+     max_boxes: int = 30
+     """Maximum boxes per algorithm."""
+
+     overlap_threshold: float = 0.5
+     """Fraction overlap required to remove redundant boxes."""
+
+     # IDW Normal sampling
+     idw_sample_count: int = 1000
+     """Total IDW samples to generate."""
+
+     idw_max_distance: float = 0.5
+     """Maximum distance from surface in meters."""
+
+     idw_power: float = 2.0
+     """IDW power factor (higher = more weight near surface)."""
+
+     # Hull filtering
+     hull_filter_enabled: bool = True
+     """Filter out constraints outside the X-Y alpha shape of point cloud."""
+
+     hull_alpha: float = 1.0
+     """Alpha shape parameter (smaller = tighter fit to concave boundaries)."""
+
+     # Output modes
+     flood_fill_output: str = "samples"
+     """Output mode for flood fill: 'boxes', 'samples', or 'both'."""
+
+     flood_fill_sample_count: int = 500
+     """Number of sample points from empty voxels."""
+
+     voxel_regions_output: str = "samples"
+     """Output mode for voxel regions: 'boxes', 'samples', or 'both'."""
+
+     voxel_regions_sample_count: int = 500
+     """Number of sample points from solid voxels."""
+
+     # Pocket detection
+     pocket_voxel_target: int = 100
+     """Target number of voxels along longest axis for pocket detection."""
+
+     pocket_min_voxel_size: float = 0.01
+     """Minimum voxel size for pocket detection."""
+
+     pocket_max_voxels_per_axis: int = 200
+     """Maximum voxels per axis for pocket grid."""
+
+     pocket_occupancy_dilation: int = 1
+     """Dilation iterations for pocket occupancy grid."""
+
+     pocket_min_volume_voxels: int = 10
+     """Minimum voxels for a pocket to be considered significant."""
+
+
+ @dataclass
+ class SamplerConfig:
+     """Configuration for SDFSampler.
+
+     Controls sample generation strategy and parameters.
+     """
+
+     # Default sampling parameters
+     total_samples: int = 10000
+     """Default total number of samples to generate."""
+
+     samples_per_primitive: int = 100
+     """Samples per primitive constraint (CONSTANT strategy)."""
+
+     samples_per_cubic_meter: float = 10000.0
+     """Sample density per cubic meter (DENSITY strategy)."""
+
+     inverse_square_base_samples: int = 100
+     """Base samples at surface (INVERSE_SQUARE strategy)."""
+
+     inverse_square_falloff: float = 2.0
+     """Falloff exponent for inverse-square sampling."""
+
+     # Band widths
+     near_band: float = 0.02
+     """Near-band width around surface."""
+
+     # Random seed
+     seed: int = 0
+     """Random seed for reproducibility."""
+
+
+ @dataclass
+ class AutoAnalysisOptions:
+     """Tunable hyperparameters for auto-analysis algorithms.
+
+     This is a standalone version of the AutoAnalysisOptions from sdf-labeler,
+     allowing fine-grained control over each algorithm.
+     """
+
+     # Voxel grid parameters
+     min_gap_size: float = 0.10
+     max_grid_dim: int = 200
+
+     # Ray propagation
+     cone_angle: float = 15.0
+
+     # Normal offset
+     normal_offset_pairs: int = 40
+
+     # Filtering
+     max_boxes: int = 30
+     overlap_threshold: float = 0.5
+
+     # IDW Normal sampling
+     idw_sample_count: int = 1000
+     idw_max_distance: float = 0.5
+     idw_power: float = 2.0
+
+     # Hull filtering
+     hull_filter_enabled: bool = True
+     hull_alpha: float = 1.0
+
+     # Output modes
+     flood_fill_output: str = "samples"
+     flood_fill_sample_count: int = 500
+     voxel_regions_output: str = "samples"
+     voxel_regions_sample_count: int = 500
+
+     @classmethod
+     def from_analyzer_config(cls, config: AnalyzerConfig) -> "AutoAnalysisOptions":
+         """Create options from AnalyzerConfig."""
+         return cls(
+             min_gap_size=config.min_gap_size,
+             max_grid_dim=config.max_grid_dim,
+             cone_angle=config.cone_angle,
+             normal_offset_pairs=config.normal_offset_pairs,
+             max_boxes=config.max_boxes,
+             overlap_threshold=config.overlap_threshold,
+             idw_sample_count=config.idw_sample_count,
+             idw_max_distance=config.idw_max_distance,
+             idw_power=config.idw_power,
+             hull_filter_enabled=config.hull_filter_enabled,
+             hull_alpha=config.hull_alpha,
+             flood_fill_output=config.flood_fill_output,
+             flood_fill_sample_count=config.flood_fill_sample_count,
+             voxel_regions_output=config.voxel_regions_output,
+             voxel_regions_sample_count=config.voxel_regions_sample_count,
+         )
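
Since AutoAnalysisOptions mirrors the analyzer-facing fields of AnalyzerConfig one-for-one, a config can be promoted to per-run options and then tweaked without touching the config itself. A short sketch using only the dataclasses above plus the standard-library `dataclasses.replace`; the overridden values are illustrative:

```python
from dataclasses import replace

from sdf_sampler.config import AnalyzerConfig, AutoAnalysisOptions

# Promote a config to per-run options, then override two knobs for one run.
config = AnalyzerConfig(min_gap_size=0.05, cone_angle=10.0)
options = AutoAnalysisOptions.from_analyzer_config(config)
dense = replace(options, idw_sample_count=5_000, flood_fill_output="both")

assert dense.min_gap_size == 0.05         # carried over from the config
assert options.idw_sample_count == 1_000  # original options left untouched
```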
sdf_sampler/io.py ADDED
@@ -0,0 +1,178 @@
+ # ABOUTME: I/O utilities for point cloud loading and sample export
+ # ABOUTME: Supports PLY, LAS/LAZ, CSV, NPZ, and Parquet formats
+
+ from pathlib import Path
+ from typing import Any
+
+ import numpy as np
+ import pandas as pd
+
+ from sdf_sampler.models.samples import TrainingSample
+
+
+ def load_point_cloud(
+     path: str | Path,
+     **kwargs: Any,
+ ) -> tuple[np.ndarray, np.ndarray | None]:
+     """Load point cloud from file.
+
+     Supported formats:
+     - PLY (requires trimesh in [io] extras)
+     - LAS/LAZ (requires laspy in [io] extras)
+     - CSV (columns: x, y, z, [nx, ny, nz])
+     - NPZ (arrays: xyz, [normals])
+     - Parquet (columns: x, y, z, [nx, ny, nz])
+
+     Args:
+         path: Path to point cloud file
+         **kwargs: Additional arguments for specific loaders
+
+     Returns:
+         Tuple of (xyz, normals) where xyz is (N, 3) and normals is (N, 3) or None
+
+     Example:
+         >>> xyz, normals = load_point_cloud("scan.ply")
+         >>> xyz, normals = load_point_cloud("points.csv")
+     """
+     path = Path(path)
+     suffix = path.suffix.lower()
+
+     if suffix == ".ply":
+         return _load_ply(path, **kwargs)
+     elif suffix in (".las", ".laz"):
+         return _load_las(path, **kwargs)
+     elif suffix == ".csv":
+         return _load_csv(path, **kwargs)
+     elif suffix == ".npz":
+         return _load_npz(path, **kwargs)
+     elif suffix == ".parquet":
+         return _load_parquet(path, **kwargs)
+     else:
+         raise ValueError(f"Unsupported file format: {suffix}")
+
+
+ def export_parquet(
+     samples: list[TrainingSample],
+     path: str | Path,
+ ) -> Path:
+     """Export training samples to Parquet file.
+
+     Creates a survi-compatible Parquet file with columns:
+     x, y, z, phi, nx, ny, nz, weight, source, is_surface, is_free
+
+     Args:
+         samples: List of TrainingSample objects
+         path: Output file path
+
+     Returns:
+         Path to created file
+
+     Example:
+         >>> export_parquet(samples, "training_data.parquet")
+     """
+     path = Path(path)
+     df = pd.DataFrame([s.to_dict() for s in samples])
+     df.to_parquet(path)
+     return path
+
+
+ def _load_ply(path: Path, **kwargs: Any) -> tuple[np.ndarray, np.ndarray | None]:
+     """Load PLY file using trimesh."""
+     try:
+         import trimesh
+     except ImportError as e:
+         raise ImportError(
+             "trimesh is required for PLY support. "
+             "Install with: pip install sdf-sampler[io]"
+         ) from e
+
+     mesh = trimesh.load(path, **kwargs)
+
+     # Handle both PointCloud and Trimesh objects
+     if hasattr(mesh, "vertices"):
+         xyz = np.asarray(mesh.vertices)
+     else:
+         xyz = np.asarray(mesh.points if hasattr(mesh, "points") else mesh)
+
+     normals = None
+     if hasattr(mesh, "vertex_normals") and mesh.vertex_normals is not None:
+         normals = np.asarray(mesh.vertex_normals)
+         if normals.shape != xyz.shape:
+             normals = None
+
+     return xyz, normals
+
+
+ def _load_las(path: Path, **kwargs: Any) -> tuple[np.ndarray, np.ndarray | None]:
+     """Load LAS/LAZ file using laspy."""
+     try:
+         import laspy
+     except ImportError as e:
+         raise ImportError(
+             "laspy is required for LAS/LAZ support. "
+             "Install with: pip install sdf-sampler[io]"
+         ) from e
+
+     las = laspy.read(path, **kwargs)
+
+     xyz = np.column_stack([las.x, las.y, las.z])
+
+     # LAS files typically don't have normals
+     normals = None
+
+     return xyz, normals
+
+
+ def _load_csv(path: Path, **kwargs: Any) -> tuple[np.ndarray, np.ndarray | None]:
+     """Load CSV file with pandas."""
+     df = pd.read_csv(path, **kwargs)
+
+     # Required columns
+     if not all(c in df.columns for c in ["x", "y", "z"]):
+         raise ValueError("CSV must have x, y, z columns")
+
+     xyz = df[["x", "y", "z"]].values
+
+     # Optional normal columns
+     normals = None
+     if all(c in df.columns for c in ["nx", "ny", "nz"]):
+         normals = df[["nx", "ny", "nz"]].values
+
+     return xyz, normals
+
+
+ def _load_npz(path: Path, **kwargs: Any) -> tuple[np.ndarray, np.ndarray | None]:
+     """Load NPZ file."""
+     data = np.load(path, **kwargs)
+
+     # Support different naming conventions
+     if "xyz" in data:
+         xyz = data["xyz"]
+     elif "points" in data:
+         xyz = data["points"]
+     else:
+         raise ValueError("NPZ must have 'xyz' or 'points' array")
+
+     normals = None
+     if "normals" in data and data["normals"].size > 0:
+         normals = data["normals"]
+
+     return xyz, normals
+
+
+ def _load_parquet(path: Path, **kwargs: Any) -> tuple[np.ndarray, np.ndarray | None]:
+     """Load Parquet file with pandas."""
+     df = pd.read_parquet(path, **kwargs)
+
+     # Required columns
+     if not all(c in df.columns for c in ["x", "y", "z"]):
+         raise ValueError("Parquet must have x, y, z columns")
+
+     xyz = df[["x", "y", "z"]].values
+
+     # Optional normal columns
+     normals = None
+     if all(c in df.columns for c in ["nx", "ny", "nz"]):
+         normals = df[["nx", "ny", "nz"]].values
+
+     return xyz, normals
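
A round-trip sketch of the NPZ path through `load_point_cloud` above; the file name is hypothetical, and the array keys come from `_load_npz`:

```python
import numpy as np

from sdf_sampler.io import load_point_cloud

# Save under 'xyz'; 'points' would also be accepted by _load_npz.
rng = np.random.default_rng(7)
np.savez("scan.npz", xyz=rng.uniform(-1.0, 1.0, size=(500, 3)))

xyz, normals = load_point_cloud("scan.npz")
assert xyz.shape == (500, 3)
assert normals is None  # no 'normals' array was saved

# Unknown extensions raise rather than guessing a parser.
try:
    load_point_cloud("scan.xyz")
except ValueError as e:
    print(e)  # Unsupported file format: .xyz
```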
@@ -0,0 +1,49 @@
+ # ABOUTME: Public model exports for sdf-sampler
+ # ABOUTME: Constraint types, analysis results, and training samples
+
+ from sdf_sampler.models.analysis import (
+     AlgorithmStats,
+     AlgorithmType,
+     AnalysisResult,
+     AnalysisSummary,
+     GeneratedConstraint,
+ )
+ from sdf_sampler.models.constraints import (
+     BoxConstraint,
+     BrushStrokeConstraint,
+     Constraint,
+     HalfspaceConstraint,
+     PocketConstraint,
+     RayCarveConstraint,
+     RayInfo,
+     SamplePointConstraint,
+     SeedPropagationConstraint,
+     SignConvention,
+     SphereConstraint,
+ )
+ from sdf_sampler.models.samples import SamplingStrategy, TrainingSample
+
+ __all__ = [
+     # Enums
+     "SignConvention",
+     "AlgorithmType",
+     "SamplingStrategy",
+     # Constraints
+     "Constraint",
+     "BoxConstraint",
+     "SphereConstraint",
+     "HalfspaceConstraint",
+     "BrushStrokeConstraint",
+     "SeedPropagationConstraint",
+     "RayCarveConstraint",
+     "RayInfo",
+     "PocketConstraint",
+     "SamplePointConstraint",
+     # Analysis
+     "AnalysisResult",
+     "AnalysisSummary",
+     "AlgorithmStats",
+     "GeneratedConstraint",
+     # Samples
+     "TrainingSample",
+ ]
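
Because of these re-exports, downstream code can import from `sdf_sampler.models` directly instead of the submodules. A short sketch; the iteration assumes AlgorithmType and SignConvention are standard Enum classes, which the `.value` comparisons in the analyzer suggest but the diff does not show:

```python
# Equivalent imports via the package re-export vs. the submodule.
from sdf_sampler.models import AlgorithmType, SignConvention
from sdf_sampler.models.analysis import AlgorithmType as AT

assert AlgorithmType is AT  # same class object, just a shorter path
print([a.value for a in AlgorithmType])  # the algorithm-name strings
print(SignConvention.SOLID.value, SignConvention.EMPTY.value)
```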