sdf-sampler 0.2.0__tar.gz → 0.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/CHANGELOG.md +13 -0
  2. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/PKG-INFO +1 -1
  3. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/pyproject.toml +1 -1
  4. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/__init__.py +1 -1
  5. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/cli.py +269 -45
  6. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/sampler.py +62 -0
  7. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/uv.lock +1 -1
  8. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/.gitignore +0 -0
  9. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/LICENSE +0 -0
  10. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/README.md +0 -0
  11. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/__main__.py +0 -0
  12. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/algorithms/__init__.py +0 -0
  13. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/algorithms/flood_fill.py +0 -0
  14. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/algorithms/normal_idw.py +0 -0
  15. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/algorithms/normal_offset.py +0 -0
  16. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/algorithms/pocket.py +0 -0
  17. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/algorithms/voxel_grid.py +0 -0
  18. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/algorithms/voxel_regions.py +0 -0
  19. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/analyzer.py +0 -0
  20. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/config.py +0 -0
  21. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/io.py +0 -0
  22. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/models/__init__.py +0 -0
  23. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/models/analysis.py +0 -0
  24. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/models/constraints.py +0 -0
  25. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/models/samples.py +0 -0
  26. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/sampling/__init__.py +0 -0
  27. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/sampling/box.py +0 -0
  28. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/sampling/brush.py +0 -0
  29. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/sampling/ray_carve.py +0 -0
  30. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/src/sdf_sampler/sampling/sphere.py +0 -0
  31. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/tests/__init__.py +0 -0
  32. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/tests/test_analyzer.py +0 -0
  33. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/tests/test_equivalence.py +0 -0
  34. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/tests/test_integration.py +0 -0
  35. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/tests/test_models.py +0 -0
  36. {sdf_sampler-0.2.0 → sdf_sampler-0.3.0}/tests/test_sampler.py +0 -0
@@ -5,6 +5,19 @@ All notable changes to sdf-sampler will be documented in this file.
5
5
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
6
6
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
7
 
8
+ ## [0.3.0] - 2025-01-29
9
+
10
+ ### Added
11
+
12
+ - **Full parameter control** in CLI and SDK
13
+ - All analysis options exposed: `--min-gap-size`, `--cone-angle`, `--idw-sample-count`, etc.
14
+ - All sampling options exposed: `--samples-per-primitive`, `--inverse-square-falloff`, etc.
15
+ - Output mode control: `--flood-fill-output`, `--voxel-regions-output` (boxes/samples/both)
16
+ - **Surface point inclusion**
17
+ - `--include-surface-points` flag to include original points with phi=0
18
+ - `--surface-point-ratio` to control fraction included (default 10%)
19
+ - SDK: `sampler.generate(..., include_surface_points=True, surface_point_ratio=0.1)`
20
+
8
21
  ## [0.2.0] - 2025-01-29
9
22
 
10
23
  ### Added
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: sdf-sampler
3
- Version: 0.2.0
3
+ Version: 0.3.0
4
4
  Summary: Auto-analysis and sampling of point clouds for SDF (Signed Distance Field) training data generation
5
5
  Project-URL: Repository, https://github.com/Chiark-Collective/sdf-sampler
6
6
  Author-email: Liam <liam@example.com>
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "sdf-sampler"
3
- version = "0.2.0"
3
+ version = "0.3.0"
4
4
  description = "Auto-analysis and sampling of point clouds for SDF (Signed Distance Field) training data generation"
5
5
  readme = "README.md"
6
6
  license = { text = "MIT" }
@@ -47,7 +47,7 @@ from sdf_sampler.models import (
47
47
  )
48
48
  from sdf_sampler.sampler import SDFSampler
49
49
 
50
- __version__ = "0.2.0"
50
+ __version__ = "0.3.0"
51
51
 
52
52
  __all__ = [
53
53
  # Main classes
@@ -1,5 +1,5 @@
1
1
  # ABOUTME: Command-line interface for sdf-sampler
2
- # ABOUTME: Provides analyze, sample, and pipeline commands
2
+ # ABOUTME: Provides analyze, sample, and pipeline commands with full parameter control
3
3
 
4
4
  """
5
5
  CLI for sdf-sampler.
@@ -18,6 +18,164 @@ from pathlib import Path
18
18
  import numpy as np
19
19
 
20
20
 
21
+ def add_analysis_options(parser: argparse.ArgumentParser) -> None:
22
+ """Add analysis configuration options to a parser."""
23
+ group = parser.add_argument_group("Analysis Options")
24
+
25
+ group.add_argument(
26
+ "--min-gap-size",
27
+ type=float,
28
+ default=0.10,
29
+ help="Minimum gap size for flood fill in meters (default: 0.10)",
30
+ )
31
+ group.add_argument(
32
+ "--max-grid-dim",
33
+ type=int,
34
+ default=200,
35
+ help="Maximum voxel grid dimension (default: 200)",
36
+ )
37
+ group.add_argument(
38
+ "--cone-angle",
39
+ type=float,
40
+ default=15.0,
41
+ help="Ray propagation cone half-angle in degrees (default: 15.0)",
42
+ )
43
+ group.add_argument(
44
+ "--normal-offset-pairs",
45
+ type=int,
46
+ default=40,
47
+ help="Number of box pairs for normal_offset algorithm (default: 40)",
48
+ )
49
+ group.add_argument(
50
+ "--max-boxes",
51
+ type=int,
52
+ default=30,
53
+ help="Maximum boxes per algorithm (default: 30)",
54
+ )
55
+ group.add_argument(
56
+ "--overlap-threshold",
57
+ type=float,
58
+ default=0.5,
59
+ help="Overlap threshold for box simplification (default: 0.5)",
60
+ )
61
+ group.add_argument(
62
+ "--idw-sample-count",
63
+ type=int,
64
+ default=1000,
65
+ help="Number of IDW samples to generate (default: 1000)",
66
+ )
67
+ group.add_argument(
68
+ "--idw-max-distance",
69
+ type=float,
70
+ default=0.5,
71
+ help="Maximum IDW distance from surface in meters (default: 0.5)",
72
+ )
73
+ group.add_argument(
74
+ "--idw-power",
75
+ type=float,
76
+ default=2.0,
77
+ help="IDW power factor (default: 2.0)",
78
+ )
79
+ group.add_argument(
80
+ "--hull-alpha",
81
+ type=float,
82
+ default=1.0,
83
+ help="Alpha shape parameter for hull filtering (default: 1.0)",
84
+ )
85
+ group.add_argument(
86
+ "--flood-fill-output",
87
+ type=str,
88
+ choices=["boxes", "samples", "both"],
89
+ default="samples",
90
+ help="Output mode for flood fill (default: samples)",
91
+ )
92
+ group.add_argument(
93
+ "--flood-fill-samples",
94
+ type=int,
95
+ default=500,
96
+ help="Number of sample points from flood fill (default: 500)",
97
+ )
98
+ group.add_argument(
99
+ "--voxel-regions-output",
100
+ type=str,
101
+ choices=["boxes", "samples", "both"],
102
+ default="samples",
103
+ help="Output mode for voxel regions (default: samples)",
104
+ )
105
+ group.add_argument(
106
+ "--voxel-regions-samples",
107
+ type=int,
108
+ default=500,
109
+ help="Number of sample points from voxel regions (default: 500)",
110
+ )
111
+
112
+
113
+ def add_sampling_options(parser: argparse.ArgumentParser) -> None:
114
+ """Add sampling configuration options to a parser."""
115
+ group = parser.add_argument_group("Sampling Options")
116
+
117
+ group.add_argument(
118
+ "-n", "--total-samples",
119
+ type=int,
120
+ default=10000,
121
+ help="Total number of samples to generate (default: 10000)",
122
+ )
123
+ group.add_argument(
124
+ "-s", "--strategy",
125
+ type=str,
126
+ choices=["constant", "density", "inverse_square"],
127
+ default="inverse_square",
128
+ help="Sampling strategy (default: inverse_square)",
129
+ )
130
+ group.add_argument(
131
+ "--seed",
132
+ type=int,
133
+ default=None,
134
+ help="Random seed for reproducibility",
135
+ )
136
+ group.add_argument(
137
+ "--samples-per-primitive",
138
+ type=int,
139
+ default=100,
140
+ help="Samples per primitive for CONSTANT strategy (default: 100)",
141
+ )
142
+ group.add_argument(
143
+ "--inverse-square-base",
144
+ type=int,
145
+ default=100,
146
+ help="Base samples at surface for INVERSE_SQUARE (default: 100)",
147
+ )
148
+ group.add_argument(
149
+ "--inverse-square-falloff",
150
+ type=float,
151
+ default=2.0,
152
+ help="Falloff exponent for INVERSE_SQUARE (default: 2.0)",
153
+ )
154
+ group.add_argument(
155
+ "--near-band",
156
+ type=float,
157
+ default=0.02,
158
+ help="Near-band width around surface (default: 0.02)",
159
+ )
160
+
161
+
162
+ def add_output_options(parser: argparse.ArgumentParser) -> None:
163
+ """Add output configuration options to a parser."""
164
+ group = parser.add_argument_group("Output Options")
165
+
166
+ group.add_argument(
167
+ "--include-surface-points",
168
+ action="store_true",
169
+ help="Include original surface points (phi=0) in output",
170
+ )
171
+ group.add_argument(
172
+ "--surface-point-ratio",
173
+ type=float,
174
+ default=0.1,
175
+ help="Ratio of surface points to include (default: 0.1 = 10%%)",
176
+ )
177
+
178
+
21
179
  def main(argv: list[str] | None = None) -> int:
22
180
  """Main CLI entry point."""
23
181
  parser = argparse.ArgumentParser(
@@ -63,6 +221,7 @@ def main(argv: list[str] | None = None) -> int:
63
221
  action="store_true",
64
222
  help="Verbose output",
65
223
  )
224
+ add_analysis_options(analyze_parser)
66
225
 
67
226
  # sample command
68
227
  sample_parser = subparsers.add_parser(
@@ -85,30 +244,13 @@ def main(argv: list[str] | None = None) -> int:
85
244
  default=None,
86
245
  help="Output parquet file (default: <input>_samples.parquet)",
87
246
  )
88
- sample_parser.add_argument(
89
- "-n", "--total-samples",
90
- type=int,
91
- default=10000,
92
- help="Total number of samples to generate (default: 10000)",
93
- )
94
- sample_parser.add_argument(
95
- "-s", "--strategy",
96
- type=str,
97
- choices=["constant", "density", "inverse_square"],
98
- default="inverse_square",
99
- help="Sampling strategy (default: inverse_square)",
100
- )
101
- sample_parser.add_argument(
102
- "--seed",
103
- type=int,
104
- default=None,
105
- help="Random seed for reproducibility",
106
- )
107
247
  sample_parser.add_argument(
108
248
  "-v", "--verbose",
109
249
  action="store_true",
110
250
  help="Verbose output",
111
251
  )
252
+ add_sampling_options(sample_parser)
253
+ add_output_options(sample_parser)
112
254
 
113
255
  # pipeline command
114
256
  pipeline_parser = subparsers.add_parser(
@@ -134,23 +276,9 @@ def main(argv: list[str] | None = None) -> int:
134
276
  help="Algorithms to run",
135
277
  )
136
278
  pipeline_parser.add_argument(
137
- "-n", "--total-samples",
138
- type=int,
139
- default=10000,
140
- help="Total number of samples to generate (default: 10000)",
141
- )
142
- pipeline_parser.add_argument(
143
- "-s", "--strategy",
144
- type=str,
145
- choices=["constant", "density", "inverse_square"],
146
- default="inverse_square",
147
- help="Sampling strategy (default: inverse_square)",
148
- )
149
- pipeline_parser.add_argument(
150
- "--seed",
151
- type=int,
152
- default=None,
153
- help="Random seed for reproducibility",
279
+ "--no-hull-filter",
280
+ action="store_true",
281
+ help="Disable hull filtering",
154
282
  )
155
283
  pipeline_parser.add_argument(
156
284
  "--save-constraints",
@@ -163,6 +291,9 @@ def main(argv: list[str] | None = None) -> int:
163
291
  action="store_true",
164
292
  help="Verbose output",
165
293
  )
294
+ add_analysis_options(pipeline_parser)
295
+ add_sampling_options(pipeline_parser)
296
+ add_output_options(pipeline_parser)
166
297
 
167
298
  # info command
168
299
  info_parser = subparsers.add_parser(
@@ -198,10 +329,46 @@ def main(argv: list[str] | None = None) -> int:
198
329
  return 0
199
330
 
200
331
 
332
+ def build_analysis_options(args: argparse.Namespace):
333
+ """Build AutoAnalysisOptions from CLI args."""
334
+ from sdf_sampler.config import AutoAnalysisOptions
335
+
336
+ return AutoAnalysisOptions(
337
+ min_gap_size=args.min_gap_size,
338
+ max_grid_dim=args.max_grid_dim,
339
+ cone_angle=args.cone_angle,
340
+ normal_offset_pairs=args.normal_offset_pairs,
341
+ max_boxes=args.max_boxes,
342
+ overlap_threshold=args.overlap_threshold,
343
+ idw_sample_count=args.idw_sample_count,
344
+ idw_max_distance=args.idw_max_distance,
345
+ idw_power=args.idw_power,
346
+ hull_filter_enabled=not getattr(args, 'no_hull_filter', False),
347
+ hull_alpha=args.hull_alpha,
348
+ flood_fill_output=args.flood_fill_output,
349
+ flood_fill_sample_count=args.flood_fill_samples,
350
+ voxel_regions_output=args.voxel_regions_output,
351
+ voxel_regions_sample_count=args.voxel_regions_samples,
352
+ )
353
+
354
+
355
+ def build_sampler_config(args: argparse.Namespace):
356
+ """Build SamplerConfig from CLI args."""
357
+ from sdf_sampler.config import SamplerConfig
358
+
359
+ return SamplerConfig(
360
+ total_samples=args.total_samples,
361
+ samples_per_primitive=args.samples_per_primitive,
362
+ inverse_square_base_samples=args.inverse_square_base,
363
+ inverse_square_falloff=args.inverse_square_falloff,
364
+ near_band=args.near_band,
365
+ seed=args.seed or 0,
366
+ )
367
+
368
+
201
369
  def cmd_analyze(args: argparse.Namespace) -> int:
202
370
  """Run analyze command."""
203
371
  from sdf_sampler import SDFAnalyzer, load_point_cloud
204
- from sdf_sampler.config import AutoAnalysisOptions
205
372
 
206
373
  if not args.input.exists():
207
374
  print(f"Error: Input file not found: {args.input}", file=sys.stderr)
@@ -222,9 +389,7 @@ def cmd_analyze(args: argparse.Namespace) -> int:
222
389
  print(f" Points: {len(xyz):,}")
223
390
  print(f" Normals: {'yes' if normals is not None else 'no'}")
224
391
 
225
- options = AutoAnalysisOptions(
226
- hull_filter_enabled=not args.no_hull_filter,
227
- )
392
+ options = build_analysis_options(args)
228
393
 
229
394
  if args.verbose:
230
395
  algos = args.algorithms or ["all"]
@@ -284,7 +449,8 @@ def cmd_sample(args: argparse.Namespace) -> int:
284
449
  print(f" Constraints: {len(constraints)}")
285
450
  print(f"Generating {args.total_samples:,} samples with strategy: {args.strategy}")
286
451
 
287
- sampler = SDFSampler()
452
+ config = build_sampler_config(args)
453
+ sampler = SDFSampler(config=config)
288
454
  samples = sampler.generate(
289
455
  xyz=xyz,
290
456
  normals=normals,
@@ -294,6 +460,12 @@ def cmd_sample(args: argparse.Namespace) -> int:
294
460
  seed=args.seed,
295
461
  )
296
462
 
463
+ # Include surface points if requested
464
+ if args.include_surface_points:
465
+ samples = _add_surface_points(
466
+ samples, xyz, normals, args.surface_point_ratio, args.verbose
467
+ )
468
+
297
469
  if args.verbose:
298
470
  print(f"Generated {len(samples)} samples")
299
471
 
@@ -305,7 +477,6 @@ def cmd_sample(args: argparse.Namespace) -> int:
305
477
  def cmd_pipeline(args: argparse.Namespace) -> int:
306
478
  """Run full pipeline: analyze + sample + export."""
307
479
  from sdf_sampler import SDFAnalyzer, SDFSampler, load_point_cloud
308
- from sdf_sampler.config import AutoAnalysisOptions
309
480
 
310
481
  if not args.input.exists():
311
482
  print(f"Error: Input file not found: {args.input}", file=sys.stderr)
@@ -331,7 +502,7 @@ def cmd_pipeline(args: argparse.Namespace) -> int:
331
502
  algos = args.algorithms or ["all"]
332
503
  print(f"Running analysis: {', '.join(algos)}")
333
504
 
334
- options = AutoAnalysisOptions()
505
+ options = build_analysis_options(args)
335
506
  analyzer = SDFAnalyzer()
336
507
  result = analyzer.analyze(
337
508
  xyz=xyz,
@@ -356,7 +527,8 @@ def cmd_pipeline(args: argparse.Namespace) -> int:
356
527
  if args.verbose:
357
528
  print(f"Generating {args.total_samples:,} samples with strategy: {args.strategy}")
358
529
 
359
- sampler = SDFSampler()
530
+ config = build_sampler_config(args)
531
+ sampler = SDFSampler(config=config)
360
532
  samples = sampler.generate(
361
533
  xyz=xyz,
362
534
  normals=normals,
@@ -366,6 +538,12 @@ def cmd_pipeline(args: argparse.Namespace) -> int:
366
538
  seed=args.seed,
367
539
  )
368
540
 
541
+ # Include surface points if requested
542
+ if args.include_surface_points:
543
+ samples = _add_surface_points(
544
+ samples, xyz, normals, args.surface_point_ratio, args.verbose
545
+ )
546
+
369
547
  if args.verbose:
370
548
  print(f"Generated {len(samples)} samples")
371
549
 
@@ -375,6 +553,52 @@ def cmd_pipeline(args: argparse.Namespace) -> int:
375
553
  return 0
376
554
 
377
555
 
556
+ def _add_surface_points(
557
+ samples: list,
558
+ xyz: np.ndarray,
559
+ normals: np.ndarray | None,
560
+ ratio: float,
561
+ verbose: bool,
562
+ ) -> list:
563
+ """Add surface points to sample list."""
564
+ from sdf_sampler.models import TrainingSample
565
+
566
+ n_surface = int(len(xyz) * ratio)
567
+ if n_surface == 0:
568
+ return samples
569
+
570
+ # Subsample if needed
571
+ if n_surface < len(xyz):
572
+ indices = np.random.choice(len(xyz), n_surface, replace=False)
573
+ surface_xyz = xyz[indices]
574
+ surface_normals = normals[indices] if normals is not None else None
575
+ else:
576
+ surface_xyz = xyz
577
+ surface_normals = normals
578
+
579
+ if verbose:
580
+ print(f"Adding {len(surface_xyz):,} surface points (phi=0)")
581
+
582
+ for i in range(len(surface_xyz)):
583
+ sample = TrainingSample(
584
+ x=float(surface_xyz[i, 0]),
585
+ y=float(surface_xyz[i, 1]),
586
+ z=float(surface_xyz[i, 2]),
587
+ phi=0.0,
588
+ weight=1.0,
589
+ source="surface",
590
+ is_surface=True,
591
+ is_free=False,
592
+ )
593
+ if surface_normals is not None:
594
+ sample.nx = float(surface_normals[i, 0])
595
+ sample.ny = float(surface_normals[i, 1])
596
+ sample.nz = float(surface_normals[i, 2])
597
+ samples.append(sample)
598
+
599
+ return samples
600
+
601
+
378
602
  def cmd_info(args: argparse.Namespace) -> int:
379
603
  """Show information about a file."""
380
604
  if not args.input.exists():
@@ -65,6 +65,8 @@ class SDFSampler:
65
65
  total_samples: int | None = None,
66
66
  strategy: str | SamplingStrategy = SamplingStrategy.INVERSE_SQUARE,
67
67
  seed: int | None = None,
68
+ include_surface_points: bool = False,
69
+ surface_point_ratio: float = 0.1,
68
70
  ) -> list[TrainingSample]:
69
71
  """Generate training samples from constraints.
70
72
 
@@ -75,6 +77,8 @@ class SDFSampler:
75
77
  total_samples: Total samples to generate (default from config)
76
78
  strategy: Sampling strategy (CONSTANT, DENSITY, or INVERSE_SQUARE)
77
79
  seed: Random seed for reproducibility
80
+ include_surface_points: If True, include original surface points with phi=0
81
+ surface_point_ratio: Fraction of surface points to include (default 0.1 = 10%)
78
82
 
79
83
  Returns:
80
84
  List of TrainingSample objects
@@ -85,6 +89,7 @@ class SDFSampler:
85
89
  ... constraints=result.constraints,
86
90
  ... strategy="inverse_square",
87
91
  ... total_samples=50000,
92
+ ... include_surface_points=True,
88
93
  ... )
89
94
  """
90
95
  xyz = np.asarray(xyz)
@@ -153,6 +158,63 @@ class SDFSampler:
153
158
  elif isinstance(constraint, SamplePointConstraint):
154
159
  samples.extend(self._sample_sample_point(constraint))
155
160
 
161
+ # Add surface points if requested
162
+ if include_surface_points:
163
+ samples.extend(
164
+ self._generate_surface_points(xyz, normals, surface_point_ratio, rng)
165
+ )
166
+
167
+ return samples
168
+
169
+ def _generate_surface_points(
170
+ self,
171
+ xyz: np.ndarray,
172
+ normals: np.ndarray | None,
173
+ ratio: float,
174
+ rng: np.random.Generator,
175
+ ) -> list[TrainingSample]:
176
+ """Generate surface point samples (phi=0) from the input point cloud.
177
+
178
+ Args:
179
+ xyz: Point cloud positions (N, 3)
180
+ normals: Optional point normals (N, 3)
181
+ ratio: Fraction of points to include (0.0 to 1.0)
182
+ rng: Random number generator
183
+
184
+ Returns:
185
+ List of TrainingSample objects with phi=0
186
+ """
187
+ n_surface = int(len(xyz) * ratio)
188
+ if n_surface == 0:
189
+ return []
190
+
191
+ # Subsample if needed
192
+ if n_surface < len(xyz):
193
+ indices = rng.choice(len(xyz), n_surface, replace=False)
194
+ surface_xyz = xyz[indices]
195
+ surface_normals = normals[indices] if normals is not None else None
196
+ else:
197
+ surface_xyz = xyz
198
+ surface_normals = normals
199
+
200
+ samples = []
201
+ for i in range(len(surface_xyz)):
202
+ sample = TrainingSample(
203
+ x=float(surface_xyz[i, 0]),
204
+ y=float(surface_xyz[i, 1]),
205
+ z=float(surface_xyz[i, 2]),
206
+ phi=0.0,
207
+ weight=1.0,
208
+ source="surface",
209
+ is_surface=True,
210
+ is_free=False,
211
+ )
212
+ if surface_normals is not None:
213
+ sample.nx = float(surface_normals[i, 0])
214
+ sample.ny = float(surface_normals[i, 1])
215
+ sample.nz = float(surface_normals[i, 2])
216
+ samples.append(sample)
217
+
156
218
  return samples
157
219
 
158
220
  def to_dataframe(self, samples: list[TrainingSample]) -> pd.DataFrame:
@@ -855,7 +855,7 @@ wheels = [
855
855
 
856
856
  [[package]]
857
857
  name = "sdf-sampler"
858
- version = "0.1.0"
858
+ version = "0.2.0"
859
859
  source = { editable = "." }
860
860
  dependencies = [
861
861
  { name = "alphashape" },
File without changes
File without changes
File without changes