sdf-sampler 0.4.0.tar.gz → 0.6.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/CHANGELOG.md +29 -0
  2. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/PKG-INFO +1 -1
  3. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/pyproject.toml +1 -1
  4. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/__init__.py +4 -2
  5. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/cli.py +48 -60
  6. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/io.py +91 -11
  7. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/sampler.py +115 -1
  8. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/sampling/box.py +8 -4
  9. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/tests/test_sampler.py +158 -0
  10. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/uv.lock +1 -1
  11. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/.gitignore +0 -0
  12. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/LICENSE +0 -0
  13. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/README.md +0 -0
  14. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/__main__.py +0 -0
  15. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/algorithms/__init__.py +0 -0
  16. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/algorithms/flood_fill.py +0 -0
  17. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/algorithms/normal_idw.py +0 -0
  18. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/algorithms/normal_offset.py +0 -0
  19. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/algorithms/pocket.py +0 -0
  20. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/algorithms/voxel_grid.py +0 -0
  21. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/algorithms/voxel_regions.py +0 -0
  22. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/analyzer.py +0 -0
  23. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/config.py +0 -0
  24. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/models/__init__.py +0 -0
  25. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/models/analysis.py +0 -0
  26. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/models/constraints.py +0 -0
  27. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/models/samples.py +0 -0
  28. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/sampling/__init__.py +0 -0
  29. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/sampling/brush.py +0 -0
  30. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/sampling/ray_carve.py +0 -0
  31. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/sampling/sphere.py +0 -0
  32. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/tests/__init__.py +0 -0
  33. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/tests/test_analyzer.py +0 -0
  34. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/tests/test_equivalence.py +0 -0
  35. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/tests/test_integration.py +0 -0
  36. {sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/tests/test_models.py +0 -0
{sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/CHANGELOG.md
@@ -5,6 +5,35 @@ All notable changes to sdf-sampler will be documented in this file.
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+ ## [0.5.0] - 2025-01-30
+
+ ### Added
+
+ - **Area-weighted surface sampling** - New sampling mode that distributes surface points uniformly by surface area instead of by vertex count. Essential for meshes with uneven vertex density (e.g., trench floors vs walls).
+   - New `load_mesh()` function returns `Mesh` object with vertices, faces, and normals
+   - Pass `mesh=` parameter to `sampler.generate()` for area-weighted sampling
+   - CLI: `--mesh path/to/mesh.obj` enables area-weighted mode
+   - Supports PLY, OBJ, STL, OFF mesh formats
+ - **OBJ/STL/OFF file support** - `load_point_cloud()` now supports additional mesh formats via trimesh
+
+ ### Example
+
+ ```python
+ from sdf_sampler import SDFSampler, load_mesh, load_point_cloud
+
+ # Load mesh for area-weighted sampling
+ mesh = load_mesh("model.obj")
+ xyz, normals = load_point_cloud("model.obj")
+
+ # Area-weighted gives uniform coverage by surface area
+ samples = sampler.generate(
+     xyz=xyz, constraints=constraints,
+     include_surface_points=True,
+     surface_point_count=1000,
+     mesh=mesh,  # Enables area-weighted sampling
+ )
+ ```
+
  ## [0.4.0] - 2025-01-30

  ### Changed
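
The changelog example above calls `sampler.generate(...)` on a `sampler` it never constructs, and `constraints` is likewise assumed. A minimal sketch that fills those gaps, using only names confirmed elsewhere in this diff (`SamplerConfig`, and the box-constraint dict shape from tests/test_sampler.py further down); the input file name is hypothetical:

```python
from sdf_sampler import SDFSampler, SamplerConfig, load_mesh, load_point_cloud

# Hypothetical input file; PLY/OBJ/STL/OFF all route through trimesh per io.py below.
xyz, normals = load_point_cloud("model.obj")
mesh = load_mesh("model.obj")

# Constraint dict shape assumed from the box fixtures in tests/test_sampler.py.
constraints = [
    {
        "type": "box",
        "sign": "empty",
        "center": (0.0, 0.0, 0.5),
        "half_extents": (1.0, 1.0, 0.5),
        "weight": 1.0,
    }
]

sampler = SDFSampler(config=SamplerConfig(total_samples=10_000))
samples = sampler.generate(
    xyz=xyz,
    constraints=constraints,
    include_surface_points=True,
    surface_point_count=1000,
    mesh=mesh,  # area-weighted surface sampling
)
```
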
{sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sdf-sampler
- Version: 0.4.0
+ Version: 0.6.0
  Summary: Auto-analysis and sampling of point clouds for SDF (Signed Distance Field) training data generation
  Project-URL: Repository, https://github.com/Chiark-Collective/sdf-sampler
  Author-email: Liam <liam@example.com>
{sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "sdf-sampler"
- version = "0.4.0"
+ version = "0.6.0"
  description = "Auto-analysis and sampling of point clouds for SDF (Signed Distance Field) training data generation"
  readme = "README.md"
  license = { text = "MIT" }
{sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/__init__.py
@@ -28,7 +28,7 @@ Example usage:

  from sdf_sampler.analyzer import SDFAnalyzer
  from sdf_sampler.config import AnalyzerConfig, SamplerConfig
- from sdf_sampler.io import export_parquet, load_point_cloud
+ from sdf_sampler.io import Mesh, export_parquet, load_mesh, load_point_cloud
  from sdf_sampler.models import (
      AlgorithmType,
      AnalysisResult,
@@ -47,7 +47,7 @@ from sdf_sampler.models import (
  )
  from sdf_sampler.sampler import SDFSampler

- __version__ = "0.4.0"
+ __version__ = "0.5.0"

  __all__ = [
      # Main classes
@@ -58,6 +58,8 @@ __all__ = [
      "SamplerConfig",
      # I/O
      "load_point_cloud",
+     "load_mesh",
+     "Mesh",
      "export_parquet",
      # Models
      "SignConvention",
{sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/cli.py
@@ -174,6 +174,14 @@ def add_output_options(parser: argparse.ArgumentParser) -> None:
          default=1000,
          help="Number of surface points to include (default: 1000)",
      )
+     group.add_argument(
+         "--mesh",
+         type=Path,
+         default=None,
+         help="Mesh file for area-weighted surface sampling (PLY/OBJ/STL). "
+         "If provided, surface points are sampled uniformly by surface area "
+         "instead of by vertex count. Recommended for meshes with uneven vertex density.",
+     )


  def main(argv: list[str] | None = None) -> int:
@@ -418,7 +426,7 @@ def cmd_analyze(args: argparse.Namespace) -> int:

  def cmd_sample(args: argparse.Namespace) -> int:
      """Run sample command."""
-     from sdf_sampler import SDFSampler, load_point_cloud
+     from sdf_sampler import SDFSampler, load_mesh, load_point_cloud

      if not args.input.exists():
          print(f"Error: Input file not found: {args.input}", file=sys.stderr)
@@ -439,6 +447,19 @@ def cmd_sample(args: argparse.Namespace) -> int:
          print(f"Error loading point cloud: {e}", file=sys.stderr)
          return 1

+     # Load mesh for area-weighted surface sampling if provided
+     mesh = None
+     if args.mesh:
+         if args.verbose:
+             print(f"Loading mesh for area-weighted sampling: {args.mesh}")
+         try:
+             mesh = load_mesh(str(args.mesh))
+             if args.verbose:
+                 print(f" Vertices: {len(mesh.vertices):,}, Faces: {len(mesh.faces):,}")
+         except Exception as e:
+             print(f"Error loading mesh: {e}", file=sys.stderr)
+             return 1
+
      if args.verbose:
          print(f"Loading constraints: {args.constraints}")

@@ -448,6 +469,9 @@ def cmd_sample(args: argparse.Namespace) -> int:
      if args.verbose:
          print(f" Constraints: {len(constraints)}")
          print(f"Generating {args.total_samples:,} samples with strategy: {args.strategy}")
+         if args.include_surface_points:
+             mode = "area-weighted" if mesh else "vertex-based"
+             print(f" Including {args.surface_point_count:,} surface points ({mode})")

      config = build_sampler_config(args)
      sampler = SDFSampler(config=config)
@@ -458,14 +482,11 @@ def cmd_sample(args: argparse.Namespace) -> int:
          total_samples=args.total_samples,
          strategy=args.strategy,
          seed=args.seed,
+         include_surface_points=args.include_surface_points,
+         surface_point_count=args.surface_point_count,
+         mesh=mesh,
      )

-     # Include surface points if requested
-     if args.include_surface_points:
-         samples = _add_surface_points(
-             samples, xyz, normals, args.surface_point_count, args.verbose
-         )
-
      if args.verbose:
          print(f"Generated {len(samples)} samples")

@@ -476,7 +497,7 @@ def cmd_sample(args: argparse.Namespace) -> int:

  def cmd_pipeline(args: argparse.Namespace) -> int:
      """Run full pipeline: analyze + sample + export."""
-     from sdf_sampler import SDFAnalyzer, SDFSampler, load_point_cloud
+     from sdf_sampler import SDFAnalyzer, SDFSampler, load_mesh, load_point_cloud

      if not args.input.exists():
          print(f"Error: Input file not found: {args.input}", file=sys.stderr)
@@ -497,6 +518,19 @@ def cmd_pipeline(args: argparse.Namespace) -> int:
          print(f" Points: {len(xyz):,}")
          print(f" Normals: {'yes' if normals is not None else 'no'}")

+     # Load mesh for area-weighted surface sampling if provided
+     mesh = None
+     if args.mesh:
+         if args.verbose:
+             print(f"Loading mesh for area-weighted sampling: {args.mesh}")
+         try:
+             mesh = load_mesh(str(args.mesh))
+             if args.verbose:
+                 print(f" Vertices: {len(mesh.vertices):,}, Faces: {len(mesh.faces):,}")
+         except Exception as e:
+             print(f"Error loading mesh: {e}", file=sys.stderr)
+             return 1
+
      # Analyze
      if args.verbose:
          algos = args.algorithms or ["all"]
@@ -526,6 +560,9 @@ def cmd_pipeline(args: argparse.Namespace) -> int:
      # Sample
      if args.verbose:
          print(f"Generating {args.total_samples:,} samples with strategy: {args.strategy}")
+         if args.include_surface_points:
+             mode = "area-weighted" if mesh else "vertex-based"
+             print(f" Including {args.surface_point_count:,} surface points ({mode})")

      config = build_sampler_config(args)
      sampler = SDFSampler(config=config)
@@ -536,14 +573,11 @@ def cmd_pipeline(args: argparse.Namespace) -> int:
          total_samples=args.total_samples,
          strategy=args.strategy,
          seed=args.seed,
+         include_surface_points=args.include_surface_points,
+         surface_point_count=args.surface_point_count,
+         mesh=mesh,
      )

-     # Include surface points if requested
-     if args.include_surface_points:
-         samples = _add_surface_points(
-             samples, xyz, normals, args.surface_point_count, args.verbose
-         )
-
      if args.verbose:
          print(f"Generated {len(samples)} samples")

@@ -553,52 +587,6 @@ def cmd_pipeline(args: argparse.Namespace) -> int:
      return 0


- def _add_surface_points(
-     samples: list,
-     xyz: np.ndarray,
-     normals: np.ndarray | None,
-     count: int,
-     verbose: bool,
- ) -> list:
-     """Add surface points to sample list."""
-     from sdf_sampler.models import TrainingSample
-
-     n_surface = min(count, len(xyz))
-     if n_surface <= 0:
-         return samples
-
-     # Subsample if needed
-     if n_surface < len(xyz):
-         indices = np.random.choice(len(xyz), n_surface, replace=False)
-         surface_xyz = xyz[indices]
-         surface_normals = normals[indices] if normals is not None else None
-     else:
-         surface_xyz = xyz
-         surface_normals = normals
-
-     if verbose:
-         print(f"Adding {len(surface_xyz):,} surface points (phi=0)")
-
-     for i in range(len(surface_xyz)):
-         sample = TrainingSample(
-             x=float(surface_xyz[i, 0]),
-             y=float(surface_xyz[i, 1]),
-             z=float(surface_xyz[i, 2]),
-             phi=0.0,
-             weight=1.0,
-             source="surface",
-             is_surface=True,
-             is_free=False,
-         )
-         if surface_normals is not None:
-             sample.nx = float(surface_normals[i, 0])
-             sample.ny = float(surface_normals[i, 1])
-             sample.nz = float(surface_normals[i, 2])
-         samples.append(sample)
-
-     return samples
-
-
  def cmd_info(args: argparse.Namespace) -> int:
      """Show information about a file."""
      if not args.input.exists():
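
Taken together, the sample and pipeline commands now delegate surface-point generation to `SDFSampler.generate()` rather than appending points in the CLI layer, so both code paths honor the new `mesh=` argument. A plausible invocation would look like `sdf-sampler sample scan.ply --constraints boxes.json --mesh scan.obj --include-surface-points --surface-point-count 2000`; note that only `--mesh` is spelled out in this diff, and the entry-point name plus the remaining flag spellings are inferred from the package name and the argparse attributes (`args.constraints`, `args.include_surface_points`, `args.surface_point_count`), so treat them as assumptions.
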
{sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/io.py
@@ -1,6 +1,7 @@
  # ABOUTME: I/O utilities for point cloud loading and sample export
  # ABOUTME: Supports PLY, LAS/LAZ, CSV, NPZ, and Parquet formats

+ from dataclasses import dataclass
  from pathlib import Path
  from typing import Any

@@ -10,6 +11,79 @@ import pandas as pd
  from sdf_sampler.models.samples import TrainingSample


+ @dataclass
+ class Mesh:
+     """Triangle mesh with vertices, faces, and optional normals.
+
+     Used for area-weighted surface sampling where we need face information.
+     """
+
+     vertices: np.ndarray  # (N, 3) vertex positions
+     faces: np.ndarray  # (M, 3) triangle face indices
+     vertex_normals: np.ndarray | None = None  # (N, 3) per-vertex normals
+
+
+ def load_mesh(
+     path: str | Path,
+     **kwargs: Any,
+ ) -> Mesh:
+     """Load mesh from file (preserves face information).
+
+     Use this instead of load_point_cloud() when you need area-weighted
+     surface sampling, which requires face information.
+
+     Supported formats:
+     - PLY (requires trimesh in [io] extras)
+     - OBJ (requires trimesh in [io] extras)
+     - STL (requires trimesh in [io] extras)
+     - OFF (requires trimesh in [io] extras)
+
+     Args:
+         path: Path to mesh file
+         **kwargs: Additional arguments for trimesh loader
+
+     Returns:
+         Mesh object with vertices, faces, and optional normals
+
+     Example:
+         >>> mesh = load_mesh("model.ply")
+         >>> print(f"Vertices: {len(mesh.vertices)}, Faces: {len(mesh.faces)}")
+     """
+     try:
+         import trimesh
+     except ImportError as e:
+         raise ImportError(
+             "trimesh is required for mesh loading. "
+             "Install with: pip install sdf-sampler[io]"
+         ) from e
+
+     path = Path(path)
+     loaded = trimesh.load(path, **kwargs)
+
+     # Handle Scene objects (multiple meshes)
+     if isinstance(loaded, trimesh.Scene):
+         # Combine all meshes into one
+         meshes = [g for g in loaded.geometry.values() if isinstance(g, trimesh.Trimesh)]
+         if not meshes:
+             raise ValueError(f"No triangle meshes found in {path}")
+         loaded = trimesh.util.concatenate(meshes)
+
+     if not isinstance(loaded, trimesh.Trimesh):
+         raise ValueError(
+             f"File {path} did not load as a triangle mesh. "
+             "Use load_point_cloud() for point cloud files."
+         )
+
+     vertices = np.asarray(loaded.vertices)
+     faces = np.asarray(loaded.faces)
+
+     vertex_normals = None
+     if loaded.vertex_normals is not None and len(loaded.vertex_normals) == len(vertices):
+         vertex_normals = np.asarray(loaded.vertex_normals)
+
+     return Mesh(vertices=vertices, faces=faces, vertex_normals=vertex_normals)
+
+
  def load_point_cloud(
      path: str | Path,
      **kwargs: Any,
@@ -37,8 +111,8 @@ def load_point_cloud(
      path = Path(path)
      suffix = path.suffix.lower()

-     if suffix == ".ply":
-         return _load_ply(path, **kwargs)
+     if suffix in (".ply", ".obj", ".stl", ".off"):
+         return _load_mesh_vertices(path, **kwargs)
      elif suffix in (".las", ".laz"):
          return _load_las(path, **kwargs)
      elif suffix == ".csv":
@@ -76,27 +150,33 @@ def export_parquet(
      return path


- def _load_ply(path: Path, **kwargs: Any) -> tuple[np.ndarray, np.ndarray | None]:
-     """Load PLY file using trimesh."""
+ def _load_mesh_vertices(path: Path, **kwargs: Any) -> tuple[np.ndarray, np.ndarray | None]:
+     """Load mesh file using trimesh and return vertices."""
      try:
          import trimesh
      except ImportError as e:
          raise ImportError(
-             "trimesh is required for PLY support. "
+             "trimesh is required for mesh file support. "
              "Install with: pip install sdf-sampler[io]"
          ) from e

-     mesh = trimesh.load(path, **kwargs)
+     loaded = trimesh.load(path, **kwargs)
+
+     # Handle Scene objects (multiple meshes)
+     if isinstance(loaded, trimesh.Scene):
+         meshes = [g for g in loaded.geometry.values() if isinstance(g, trimesh.Trimesh)]
+         if meshes:
+             loaded = trimesh.util.concatenate(meshes)

      # Handle both PointCloud and Trimesh objects
-     if hasattr(mesh, "vertices"):
-         xyz = np.asarray(mesh.vertices)
+     if hasattr(loaded, "vertices"):
+         xyz = np.asarray(loaded.vertices)
      else:
-         xyz = np.asarray(mesh.points if hasattr(mesh, "points") else mesh)
+         xyz = np.asarray(loaded.points if hasattr(loaded, "points") else loaded)

      normals = None
-     if hasattr(mesh, "vertex_normals") and mesh.vertex_normals is not None:
-         normals = np.asarray(mesh.vertex_normals)
+     if hasattr(loaded, "vertex_normals") and loaded.vertex_normals is not None:
+         normals = np.asarray(loaded.vertex_normals)
          if normals.shape != xyz.shape:
              normals = None

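
Both loaders now share the Scene-flattening step, so a multi-object file loads as one concatenated mesh on either path. A small sketch of the resulting invariant (file name hypothetical; requires the `[io]` extra for trimesh):

```python
from sdf_sampler.io import load_mesh, load_point_cloud

mesh = load_mesh("scene.obj")                 # hypothetical multi-object OBJ
xyz, normals = load_point_cloud("scene.obj")  # same file, vertices only

# Same flattening on both paths, so the vertex arrays should line up.
assert mesh.vertices.shape == xyz.shape
assert mesh.faces.shape[1] == 3               # (M, 3) triangle indices
if mesh.vertex_normals is not None:
    assert mesh.vertex_normals.shape == mesh.vertices.shape
```
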
{sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/sampler.py
@@ -67,6 +67,7 @@ class SDFSampler:
          seed: int | None = None,
          include_surface_points: bool = False,
          surface_point_count: int | None = None,
+         mesh: Any = None,
      ) -> list[TrainingSample]:
          """Generate training samples from constraints.

@@ -79,6 +80,8 @@ class SDFSampler:
              seed: Random seed for reproducibility
              include_surface_points: If True, include original surface points with phi=0
              surface_point_count: Number of surface points to include (default: 1000, or len(xyz) if smaller)
+             mesh: Optional Mesh object for area-weighted surface sampling. If provided,
+                 surface points are sampled uniformly by surface area instead of by vertex.

          Returns:
              List of TrainingSample objects
@@ -163,7 +166,7 @@ class SDFSampler:
              # Default to 1000 surface points, or all points if smaller
              count = surface_point_count if surface_point_count is not None else min(1000, len(xyz))
              samples.extend(
-                 self._generate_surface_points(xyz, normals, count, rng)
+                 self._generate_surface_points(xyz, normals, count, rng, mesh)
              )

          return samples
@@ -174,6 +177,7 @@ class SDFSampler:
          normals: np.ndarray | None,
          count: int,
          rng: np.random.Generator,
+         mesh: Any = None,
      ) -> list[TrainingSample]:
          """Generate surface point samples (phi=0) from the input point cloud.

@@ -182,10 +186,19 @@ class SDFSampler:
              normals: Optional point normals (N, 3)
              count: Number of surface points to include
              rng: Random number generator
+             mesh: Optional Mesh object for area-weighted sampling

          Returns:
              List of TrainingSample objects with phi=0
          """
+         if count <= 0:
+             return []
+
+         # Use area-weighted sampling if mesh is provided
+         if mesh is not None:
+             return self._generate_surface_points_area_weighted(mesh, count, rng)
+
+         # Fallback to vertex-based sampling
          n_surface = min(count, len(xyz))
          if n_surface <= 0:
              return []
@@ -219,6 +232,107 @@ class SDFSampler:

          return samples

+     def _generate_surface_points_area_weighted(
+         self,
+         mesh: Any,
+         count: int,
+         rng: np.random.Generator,
+     ) -> list[TrainingSample]:
+         """Generate surface points using area-weighted sampling.
+
+         Samples points uniformly by surface area, not by vertex count.
+         This ensures uniform coverage even when vertex density varies.
+
+         Args:
+             mesh: Mesh object with vertices, faces, and optional vertex_normals
+             count: Number of surface points to generate
+             rng: Random number generator
+
+         Returns:
+             List of TrainingSample objects with phi=0
+         """
+         vertices = mesh.vertices
+         faces = mesh.faces
+
+         # Compute face areas
+         v0 = vertices[faces[:, 0]]
+         v1 = vertices[faces[:, 1]]
+         v2 = vertices[faces[:, 2]]
+         face_areas = 0.5 * np.linalg.norm(np.cross(v1 - v0, v2 - v0), axis=1)
+
+         # Sample faces proportional to their area
+         total_area = face_areas.sum()
+         if total_area <= 0:
+             return []
+
+         face_probs = face_areas / total_area
+         sampled_faces = rng.choice(len(faces), size=count, p=face_probs)
+
+         # Sample random point within each selected face using barycentric coordinates
+         # Generate random barycentric coordinates
+         r1 = rng.random(count)
+         r2 = rng.random(count)
+         # Ensure uniform distribution within triangle
+         sqrt_r1 = np.sqrt(r1)
+         u = 1 - sqrt_r1
+         v = sqrt_r1 * (1 - r2)
+         w = sqrt_r1 * r2
+
+         # Get vertices for sampled faces
+         f_v0 = vertices[faces[sampled_faces, 0]]
+         f_v1 = vertices[faces[sampled_faces, 1]]
+         f_v2 = vertices[faces[sampled_faces, 2]]
+
+         # Compute sample positions
+         surface_xyz = (
+             u[:, np.newaxis] * f_v0 +
+             v[:, np.newaxis] * f_v1 +
+             w[:, np.newaxis] * f_v2
+         )
+
+         # Compute normals (interpolated from vertex normals if available, else face normals)
+         if mesh.vertex_normals is not None:
+             n0 = mesh.vertex_normals[faces[sampled_faces, 0]]
+             n1 = mesh.vertex_normals[faces[sampled_faces, 1]]
+             n2 = mesh.vertex_normals[faces[sampled_faces, 2]]
+             surface_normals = (
+                 u[:, np.newaxis] * n0 +
+                 v[:, np.newaxis] * n1 +
+                 w[:, np.newaxis] * n2
+             )
+             # Normalize
+             norms = np.linalg.norm(surface_normals, axis=1, keepdims=True)
+             norms = np.where(norms > 0, norms, 1)
+             surface_normals = surface_normals / norms
+         else:
+             # Compute face normals
+             edge1 = f_v1 - f_v0
+             edge2 = f_v2 - f_v0
+             surface_normals = np.cross(edge1, edge2)
+             norms = np.linalg.norm(surface_normals, axis=1, keepdims=True)
+             norms = np.where(norms > 0, norms, 1)
+             surface_normals = surface_normals / norms
+
+         # Build samples
+         samples = []
+         for i in range(len(surface_xyz)):
+             sample = TrainingSample(
+                 x=float(surface_xyz[i, 0]),
+                 y=float(surface_xyz[i, 1]),
+                 z=float(surface_xyz[i, 2]),
+                 phi=0.0,
+                 nx=float(surface_normals[i, 0]),
+                 ny=float(surface_normals[i, 1]),
+                 nz=float(surface_normals[i, 2]),
+                 weight=1.0,
+                 source="surface",
+                 is_surface=True,
+                 is_free=False,
+             )
+             samples.append(sample)
+
+         return samples
+
      def to_dataframe(self, samples: list[TrainingSample]) -> pd.DataFrame:
          """Convert samples to pandas DataFrame.

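
The `u = 1 - sqrt(r1)` warp in the new method is the standard square-root trick for drawing points uniformly inside a triangle; without it, naive barycentric draws cluster toward one vertex. A standalone sketch of the same face-area-plus-barycentric logic, with a two-triangle sanity check (the function name and geometry here are illustrative, not part of the package):

```python
import numpy as np

def sample_triangles_by_area(vertices, faces, count, rng):
    """Area-weighted triangle sampling, mirroring the method above."""
    v0, v1, v2 = vertices[faces[:, 0]], vertices[faces[:, 1]], vertices[faces[:, 2]]
    areas = 0.5 * np.linalg.norm(np.cross(v1 - v0, v2 - v0), axis=1)
    idx = rng.choice(len(faces), size=count, p=areas / areas.sum())
    r1, r2 = rng.random(count), rng.random(count)
    s = np.sqrt(r1)  # sqrt warp: uniform density over each triangle
    u, v, w = 1 - s, s * (1 - r2), s * r2
    return u[:, None] * v0[idx] + v[:, None] * v1[idx] + w[:, None] * v2[idx]

# Two disjoint triangles with areas 0.5 and 50: about 99% of the points
# should land on the big one, regardless of vertex counts.
verts = np.array(
    [[0, 0, 0], [1, 0, 0], [0, 1, 0],     # small triangle, area 0.5
     [2, 0, 0], [12, 0, 0], [2, 10, 0]],  # big triangle, area 50
    dtype=float,
)
faces = np.array([[0, 1, 2], [3, 4, 5]])
pts = sample_triangles_by_area(verts, faces, 10_000, np.random.default_rng(0))
print((pts[:, 0] > 1.5).mean())  # ~0.99 = 50 / 50.5
```
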
{sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/src/sdf_sampler/sampling/box.py
@@ -80,17 +80,18 @@ def sample_box_inverse_square(
      """Generate samples from a box with inverse-square density distribution.

      Samples more points near the surface (point cloud) and fewer far away.
+     Uses actual distance to surface for phi values (signed distance).

      Args:
          constraint: Box constraint to sample
          rng: Random number generator
-         near_band: Near-band width for offset
+         near_band: Near-band width for density weighting (not phi assignment)
          n_samples: Number of samples to generate
          surface_tree: KDTree of surface points for distance computation
          falloff: Falloff exponent (higher = faster falloff)

      Returns:
-         List of TrainingSample objects
+         List of TrainingSample objects with phi = actual signed distance to surface
      """
      samples = []
      center = np.array(constraint.center)
@@ -109,8 +110,11 @@ def sample_box_inverse_square(
          weight = (near_band / min_dist) ** falloff

          if rng.random() < min(1.0, weight):
-             offset = near_band if constraint.sign == SignConvention.EMPTY else -near_band
-             phi = offset
+             # Use actual distance to surface for phi, with sign based on constraint type
+             # EMPTY regions have positive phi (outside surface)
+             # SOLID regions have negative phi (inside surface)
+             sign = 1.0 if constraint.sign == SignConvention.EMPTY else -1.0
+             phi = sign * float(dist_to_surface)

              samples.append(
                  TrainingSample(
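
The fix above replaces a constant ±near_band with the queried surface distance. A minimal sketch of that assignment against a flat plane, assuming `surface_tree` is a SciPy cKDTree (consistent with the "KDTree of surface points" docstring, though the concrete type is not shown in this diff):

```python
import numpy as np
from scipy.spatial import cKDTree

# Plane z=0 sampled on a grid, like the test fixture below.
g = np.linspace(-2, 2, 50)
xx, yy = np.meshgrid(g, g)
surface_tree = cKDTree(np.column_stack([xx.ravel(), yy.ravel(), np.zeros(xx.size)]))

point = np.array([0.3, -0.2, 0.7])   # candidate sample inside an EMPTY box
dist_to_surface, _ = surface_tree.query(point)

sign = 1.0                            # +1 for EMPTY, -1 for SOLID
phi = sign * float(dist_to_surface)   # ~0.7, not the constant near_band
```
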
{sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/tests/test_sampler.py
@@ -229,3 +229,161 @@ class TestSamplerSignConvention:
          for s in samples:
              assert s.phi > 0, f"EMPTY sample should have positive phi, got {s.phi}"
              assert s.is_free
+
+
+ class TestBoxInverseSquarePhiValues:
+     """Tests that box inverse_square samples use actual distance to surface for phi."""
+
+     @pytest.fixture
+     def plane_surface(self):
+         """Flat plane at z=0 for easy distance calculation."""
+         x = np.linspace(-2, 2, 50)
+         y = np.linspace(-2, 2, 50)
+         xx, yy = np.meshgrid(x, y)
+         xyz = np.column_stack([xx.ravel(), yy.ravel(), np.zeros(2500)])
+         return xyz
+
+     @pytest.fixture
+     def box_above_plane(self):
+         """Box constraint above z=0 plane (empty region above surface)."""
+         return {
+             "type": "box",
+             "sign": "empty",  # Above surface = empty/positive phi
+             "center": (0.0, 0.0, 0.5),  # Center at z=0.5
+             "half_extents": (1.0, 1.0, 0.5),  # Extends from z=0 to z=1
+             "weight": 1.0,
+         }
+
+     @pytest.fixture
+     def box_below_plane(self):
+         """Box constraint below z=0 plane (solid region below surface)."""
+         return {
+             "type": "box",
+             "sign": "solid",  # Below surface = solid/negative phi
+             "center": (0.0, 0.0, -0.5),  # Center at z=-0.5
+             "half_extents": (1.0, 1.0, 0.5),  # Extends from z=-1 to z=0
+             "weight": 1.0,
+         }
+
+     def test_inverse_square_phi_is_actual_distance(self, plane_surface, box_above_plane):
+         """Verify phi is based on distance to nearest surface point, not constant near_band."""
+         sampler = SDFSampler(config=SamplerConfig(total_samples=500))
+         samples = sampler.generate(
+             xyz=plane_surface,
+             constraints=[box_above_plane],
+             strategy=SamplingStrategy.INVERSE_SQUARE,
+             seed=42,
+         )
+
+         assert len(samples) > 0, "Should generate samples"
+
+         for s in samples:
+             # For a flat plane at z=0, distance to surface is approximately |z|
+             # (exact value depends on nearest point in the discrete point cloud)
+             z_distance = abs(s.z)
+
+             # phi should be approximately |z| (within grid spacing tolerance)
+             # Grid spacing is ~0.08 units, so allow some tolerance
+             assert abs(s.phi - z_distance) < 0.1, (
+                 f"phi should be approximately equal to |z| distance. "
+                 f"Got phi={s.phi}, z={s.z}, expected ~{z_distance}"
+             )
+
+             # phi should definitely be positive for empty constraint
+             assert s.phi > 0, f"EMPTY sample should have positive phi, got {s.phi}"
+
+     def test_inverse_square_phi_varies_with_distance(self, plane_surface, box_above_plane):
+         """Verify phi values vary based on sample distance from surface."""
+         sampler = SDFSampler(config=SamplerConfig(total_samples=500))
+         samples = sampler.generate(
+             xyz=plane_surface,
+             constraints=[box_above_plane],
+             strategy=SamplingStrategy.INVERSE_SQUARE,
+             seed=42,
+         )
+
+         phi_values = [s.phi for s in samples]
+
+         # Phi should vary (not be constant ±near_band)
+         phi_std = np.std(phi_values)
+         assert phi_std > 0.01, (
+             f"phi values should vary with distance, got std={phi_std}. "
+             "This suggests phi is constant (bug: using near_band instead of distance)"
+         )
+
+         # Should have a range of values, not just near_band=0.02
+         phi_min, phi_max = min(phi_values), max(phi_values)
+         phi_range = phi_max - phi_min
+         assert phi_range > 0.1, (
+             f"phi range should be > 0.1, got {phi_range}. "
+             f"Values: min={phi_min}, max={phi_max}"
+         )
+
+     def test_inverse_square_solid_has_negative_phi(self, plane_surface, box_below_plane):
+         """Verify solid box samples have negative phi with magnitude proportional to distance."""
+         sampler = SDFSampler(config=SamplerConfig(total_samples=500))
+         samples = sampler.generate(
+             xyz=plane_surface,
+             constraints=[box_below_plane],
+             strategy=SamplingStrategy.INVERSE_SQUARE,
+             seed=42,
+         )
+
+         for s in samples:
+             # SOLID constraint should always have negative phi
+             assert s.phi < 0, f"SOLID sample should have negative phi, got {s.phi}"
+
+             # For flat plane at z=0, solid samples are at z<0
+             # Distance to nearest surface point is approximately |z|
+             z_distance = abs(s.z)
+
+             # phi magnitude should be approximately |z| (within grid spacing tolerance)
+             assert abs(abs(s.phi) - z_distance) < 0.1, (
+                 f"phi magnitude should be approximately |z|. "
+                 f"Got phi={s.phi}, z={s.z}, expected ~{-z_distance}"
+             )
+
+     def test_inverse_square_phi_correlates_with_z_coordinate(self, plane_surface, box_above_plane):
+         """For plane at z=0, phi should be correlated with |z| coordinate."""
+         sampler = SDFSampler(config=SamplerConfig(total_samples=200))
+         samples = sampler.generate(
+             xyz=plane_surface,
+             constraints=[box_above_plane],
+             strategy=SamplingStrategy.INVERSE_SQUARE,
+             seed=123,
+         )
+
+         # Collect z values and phi values
+         z_values = np.array([abs(s.z) for s in samples])
+         phi_values = np.array([s.phi for s in samples])
+
+         # phi should be positively correlated with |z|
+         # (samples further from z=0 should have larger phi)
+         correlation = np.corrcoef(z_values, phi_values)[0, 1]
+         assert correlation > 0.9, (
+             f"phi should be strongly correlated with |z|. "
+             f"Got correlation={correlation:.3f}"
+         )
+
+     def test_inverse_square_not_constant_near_band(self, plane_surface, box_above_plane):
+         """Explicitly verify phi is NOT the constant near_band value."""
+         near_band = 0.02  # Default near_band value
+         sampler = SDFSampler(config=SamplerConfig(total_samples=200, near_band=near_band))
+         samples = sampler.generate(
+             xyz=plane_surface,
+             constraints=[box_above_plane],
+             strategy=SamplingStrategy.INVERSE_SQUARE,
+             seed=42,
+         )
+
+         # Count how many samples have phi approximately equal to near_band
+         near_band_count = sum(1 for s in samples if abs(abs(s.phi) - near_band) < 0.001)
+         total = len(samples)
+
+         # With actual distance-based phi, very few samples should be exactly at near_band
+         # (only those that happen to be exactly 0.02 away from surface)
+         ratio = near_band_count / total
+         assert ratio < 0.1, (
+             f"{near_band_count}/{total} ({ratio:.0%}) samples have phi≈±near_band. "
+             "This suggests phi is still using constant near_band instead of actual distance."
+         )
{sdf_sampler-0.4.0 → sdf_sampler-0.6.0}/uv.lock
@@ -855,7 +855,7 @@ wheels = [

  [[package]]
  name = "sdf-sampler"
- version = "0.3.0"
+ version = "0.5.0"
  source = { editable = "." }
  dependencies = [
      { name = "alphashape" },