sdf-sampler 0.3.0__tar.gz → 0.5.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/CHANGELOG.md +40 -2
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/PKG-INFO +1 -1
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/pyproject.toml +1 -1
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/__init__.py +4 -2
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/analyzer.py +2 -1
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/cli.py +52 -64
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/io.py +91 -11
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/models/analysis.py +7 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/sampler.py +123 -7
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/tests/test_equivalence.py +117 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/uv.lock +1 -1
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/.gitignore +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/LICENSE +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/README.md +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/__main__.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/algorithms/__init__.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/algorithms/flood_fill.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/algorithms/normal_idw.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/algorithms/normal_offset.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/algorithms/pocket.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/algorithms/voxel_grid.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/algorithms/voxel_regions.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/config.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/models/__init__.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/models/constraints.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/models/samples.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/sampling/__init__.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/sampling/box.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/sampling/brush.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/sampling/ray_carve.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/src/sdf_sampler/sampling/sphere.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/tests/__init__.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/tests/test_analyzer.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/tests/test_integration.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/tests/test_models.py +0 -0
- {sdf_sampler-0.3.0 → sdf_sampler-0.5.0}/tests/test_sampler.py +0 -0
CHANGELOG.md

@@ -5,6 +5,44 @@ All notable changes to sdf-sampler will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.5.0] - 2025-01-30
+
+### Added
+
+- **Area-weighted surface sampling** - New sampling mode that distributes surface points uniformly by surface area instead of by vertex count. Essential for meshes with uneven vertex density (e.g., trench floors vs walls).
+  - New `load_mesh()` function returns `Mesh` object with vertices, faces, and normals
+  - Pass `mesh=` parameter to `sampler.generate()` for area-weighted sampling
+  - CLI: `--mesh path/to/mesh.obj` enables area-weighted mode
+  - Supports PLY, OBJ, STL, OFF mesh formats
+- **OBJ/STL/OFF file support** - `load_point_cloud()` now supports additional mesh formats via trimesh
+
+### Example
+
+```python
+from sdf_sampler import SDFSampler, load_mesh, load_point_cloud
+
+# Load mesh for area-weighted sampling
+mesh = load_mesh("model.obj")
+xyz, normals = load_point_cloud("model.obj")
+
+# Area-weighted gives uniform coverage by surface area
+samples = sampler.generate(
+    xyz=xyz, constraints=constraints,
+    include_surface_points=True,
+    surface_point_count=1000,
+    mesh=mesh,  # Enables area-weighted sampling
+)
+```
+
+## [0.4.0] - 2025-01-30
+
+### Changed
+
+- **Default algorithms no longer include `normal_idw`** - The `normal_idw` algorithm is now opt-in only. Default algorithms are: `flood_fill`, `voxel_regions`, `normal_offset`. To use `normal_idw`, explicitly pass `algorithms=["normal_idw"]` or include it in your algorithm list.
+- **Surface point count is now a direct count** - Replaced `surface_point_ratio` with `surface_point_count`. Instead of specifying a percentage, you now specify the exact number of surface points to include.
+  - CLI: `--surface-point-count 1000` (default: 1000)
+  - SDK: `sampler.generate(..., include_surface_points=True, surface_point_count=1000)`
+
 ## [0.3.0] - 2025-01-29
 
 ### Added
@@ -15,8 +53,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Output mode control: `--flood-fill-output`, `--voxel-regions-output` (boxes/samples/both)
 - **Surface point inclusion**
   - `--include-surface-points` flag to include original points with phi=0
-  - `--surface-point-
-  - SDK: `sampler.generate(..., include_surface_points=True,
+  - `--surface-point-count` to specify number of surface points (default 1000)
+  - SDK: `sampler.generate(..., include_surface_points=True, surface_point_count=1000)`
 
 ## [0.2.0] - 2025-01-29
 
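The 0.4.0 entries describe two API migrations. A minimal before/after sketch under stated assumptions (the input file name is hypothetical, and the commented 0.3.x call shapes are inferred from the removed CLI code later in this diff rather than spelled out here):

```python
from sdf_sampler import SDFAnalyzer, SDFSampler, load_point_cloud

xyz, normals = load_point_cloud("model.ply")  # hypothetical input file

analyzer = SDFAnalyzer()
result = analyzer.analyze(
    xyz=xyz,
    normals=normals,
    # 0.3.x ran normal_idw by default; from 0.4.0 it must be listed explicitly
    algorithms=["flood_fill", "voxel_regions", "normal_offset", "normal_idw"],
)

sampler = SDFSampler()
samples = sampler.generate(
    xyz=xyz,
    constraints=result.constraints,
    include_surface_points=True,
    # 0.3.x took surface_point_ratio (a fraction); 0.4.0+ takes an exact count
    surface_point_count=1000,
)
```
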
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sdf-sampler
-Version: 0.3.0
+Version: 0.5.0
 Summary: Auto-analysis and sampling of point clouds for SDF (Signed Distance Field) training data generation
 Project-URL: Repository, https://github.com/Chiark-Collective/sdf-sampler
 Author-email: Liam <liam@example.com>
src/sdf_sampler/__init__.py

@@ -28,7 +28,7 @@ Example usage:
 
 from sdf_sampler.analyzer import SDFAnalyzer
 from sdf_sampler.config import AnalyzerConfig, SamplerConfig
-from sdf_sampler.io import export_parquet, load_point_cloud
+from sdf_sampler.io import Mesh, export_parquet, load_mesh, load_point_cloud
 from sdf_sampler.models import (
     AlgorithmType,
     AnalysisResult,
@@ -47,7 +47,7 @@ from sdf_sampler.models import (
 )
 from sdf_sampler.sampler import SDFSampler
 
-__version__ = "0.3.0"
+__version__ = "0.5.0"
 
 __all__ = [
     # Main classes
@@ -58,6 +58,8 @@ __all__ = [
     "SamplerConfig",
     # I/O
     "load_point_cloud",
+    "load_mesh",
+    "Mesh",
     "export_parquet",
     # Models
     "SignConvention",
src/sdf_sampler/analyzer.py

@@ -14,6 +14,7 @@ from sdf_sampler.algorithms.voxel_regions import generate_voxel_region_constrain
 from sdf_sampler.config import AnalyzerConfig, AutoAnalysisOptions
 from sdf_sampler.models.analysis import (
     ALL_ALGORITHMS,
+    DEFAULT_ALGORITHMS,
     AlgorithmStats,
     AlgorithmType,
     AnalysisResult,
@@ -93,7 +94,7 @@ class SDFAnalyzer:
             raise ValueError(f"normals shape {normals.shape} doesn't match xyz {xyz.shape}")
 
         # Determine which algorithms to run
-        algo_list = algorithms if algorithms else [a.value for a in ALL_ALGORITHMS]
+        algo_list = algorithms if algorithms else [a.value for a in DEFAULT_ALGORITHMS]
        algo_list = [a for a in algo_list if a in [alg.value for alg in ALL_ALGORITHMS]]
 
        # Run algorithms and collect constraints
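The two lines above implement the new default selection: fall back to `DEFAULT_ALGORITHMS` when no list is passed, then drop any name that is not in `ALL_ALGORITHMS`. A self-contained sketch of the same logic (the enum string values are taken from the algorithm names used elsewhere in this diff):

```python
from enum import Enum

class AlgorithmType(str, Enum):
    FLOOD_FILL = "flood_fill"
    VOXEL_REGIONS = "voxel_regions"
    NORMAL_OFFSET = "normal_offset"
    NORMAL_IDW = "normal_idw"

ALL_ALGORITHMS = list(AlgorithmType)
DEFAULT_ALGORITHMS = [
    AlgorithmType.FLOOD_FILL,
    AlgorithmType.VOXEL_REGIONS,
    AlgorithmType.NORMAL_OFFSET,
]

def select_algorithms(algorithms: list[str] | None) -> list[str]:
    # No explicit request: use the defaults, which exclude normal_idw
    algo_list = algorithms if algorithms else [a.value for a in DEFAULT_ALGORITHMS]
    # Unknown names are silently dropped, mirroring the filter in SDFAnalyzer
    return [a for a in algo_list if a in [alg.value for alg in ALL_ALGORITHMS]]

assert select_algorithms(None) == ["flood_fill", "voxel_regions", "normal_offset"]
assert select_algorithms(["normal_idw", "bogus"]) == ["normal_idw"]
```
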
src/sdf_sampler/cli.py

@@ -169,10 +169,18 @@ def add_output_options(parser: argparse.ArgumentParser) -> None:
         help="Include original surface points (phi=0) in output",
     )
     group.add_argument(
-        "--surface-point-ratio",
-        type=float,
-        default=
-        help="
+        "--surface-point-count",
+        type=int,
+        default=1000,
+        help="Number of surface points to include (default: 1000)",
+    )
+    group.add_argument(
+        "--mesh",
+        type=Path,
+        default=None,
+        help="Mesh file for area-weighted surface sampling (PLY/OBJ/STL). "
+        "If provided, surface points are sampled uniformly by surface area "
+        "instead of by vertex count. Recommended for meshes with uneven vertex density.",
     )
 
 
@@ -418,7 +426,7 @@ def cmd_analyze(args: argparse.Namespace) -> int:
 
 def cmd_sample(args: argparse.Namespace) -> int:
     """Run sample command."""
-    from sdf_sampler import SDFSampler, load_point_cloud
+    from sdf_sampler import SDFSampler, load_mesh, load_point_cloud
 
     if not args.input.exists():
         print(f"Error: Input file not found: {args.input}", file=sys.stderr)
@@ -439,6 +447,19 @@ def cmd_sample(args: argparse.Namespace) -> int:
         print(f"Error loading point cloud: {e}", file=sys.stderr)
         return 1
 
+    # Load mesh for area-weighted surface sampling if provided
+    mesh = None
+    if args.mesh:
+        if args.verbose:
+            print(f"Loading mesh for area-weighted sampling: {args.mesh}")
+        try:
+            mesh = load_mesh(str(args.mesh))
+            if args.verbose:
+                print(f"  Vertices: {len(mesh.vertices):,}, Faces: {len(mesh.faces):,}")
+        except Exception as e:
+            print(f"Error loading mesh: {e}", file=sys.stderr)
+            return 1
+
     if args.verbose:
         print(f"Loading constraints: {args.constraints}")
 
@@ -448,6 +469,9 @@ def cmd_sample(args: argparse.Namespace) -> int:
     if args.verbose:
         print(f"  Constraints: {len(constraints)}")
         print(f"Generating {args.total_samples:,} samples with strategy: {args.strategy}")
+        if args.include_surface_points:
+            mode = "area-weighted" if mesh else "vertex-based"
+            print(f"  Including {args.surface_point_count:,} surface points ({mode})")
 
     config = build_sampler_config(args)
     sampler = SDFSampler(config=config)
@@ -458,14 +482,11 @@ def cmd_sample(args: argparse.Namespace) -> int:
         total_samples=args.total_samples,
         strategy=args.strategy,
         seed=args.seed,
+        include_surface_points=args.include_surface_points,
+        surface_point_count=args.surface_point_count,
+        mesh=mesh,
     )
 
-    # Include surface points if requested
-    if args.include_surface_points:
-        samples = _add_surface_points(
-            samples, xyz, normals, args.surface_point_ratio, args.verbose
-        )
-
     if args.verbose:
         print(f"Generated {len(samples)} samples")
 
@@ -476,7 +497,7 @@ def cmd_sample(args: argparse.Namespace) -> int:
 
 def cmd_pipeline(args: argparse.Namespace) -> int:
     """Run full pipeline: analyze + sample + export."""
-    from sdf_sampler import SDFAnalyzer, SDFSampler, load_point_cloud
+    from sdf_sampler import SDFAnalyzer, SDFSampler, load_mesh, load_point_cloud
 
     if not args.input.exists():
         print(f"Error: Input file not found: {args.input}", file=sys.stderr)
@@ -497,6 +518,19 @@ def cmd_pipeline(args: argparse.Namespace) -> int:
         print(f"  Points: {len(xyz):,}")
         print(f"  Normals: {'yes' if normals is not None else 'no'}")
 
+    # Load mesh for area-weighted surface sampling if provided
+    mesh = None
+    if args.mesh:
+        if args.verbose:
+            print(f"Loading mesh for area-weighted sampling: {args.mesh}")
+        try:
+            mesh = load_mesh(str(args.mesh))
+            if args.verbose:
+                print(f"  Vertices: {len(mesh.vertices):,}, Faces: {len(mesh.faces):,}")
+        except Exception as e:
+            print(f"Error loading mesh: {e}", file=sys.stderr)
+            return 1
+
     # Analyze
     if args.verbose:
         algos = args.algorithms or ["all"]
@@ -526,6 +560,9 @@ def cmd_pipeline(args: argparse.Namespace) -> int:
     # Sample
     if args.verbose:
         print(f"Generating {args.total_samples:,} samples with strategy: {args.strategy}")
+        if args.include_surface_points:
+            mode = "area-weighted" if mesh else "vertex-based"
+            print(f"  Including {args.surface_point_count:,} surface points ({mode})")
 
     config = build_sampler_config(args)
     sampler = SDFSampler(config=config)
@@ -536,14 +573,11 @@ def cmd_pipeline(args: argparse.Namespace) -> int:
         total_samples=args.total_samples,
         strategy=args.strategy,
         seed=args.seed,
+        include_surface_points=args.include_surface_points,
+        surface_point_count=args.surface_point_count,
+        mesh=mesh,
     )
 
-    # Include surface points if requested
-    if args.include_surface_points:
-        samples = _add_surface_points(
-            samples, xyz, normals, args.surface_point_ratio, args.verbose
-        )
-
     if args.verbose:
         print(f"Generated {len(samples)} samples")
 
@@ -553,52 +587,6 @@ def cmd_pipeline(args: argparse.Namespace) -> int:
     return 0
 
 
-def _add_surface_points(
-    samples: list,
-    xyz: np.ndarray,
-    normals: np.ndarray | None,
-    ratio: float,
-    verbose: bool,
-) -> list:
-    """Add surface points to sample list."""
-    from sdf_sampler.models import TrainingSample
-
-    n_surface = int(len(xyz) * ratio)
-    if n_surface == 0:
-        return samples
-
-    # Subsample if needed
-    if n_surface < len(xyz):
-        indices = np.random.choice(len(xyz), n_surface, replace=False)
-        surface_xyz = xyz[indices]
-        surface_normals = normals[indices] if normals is not None else None
-    else:
-        surface_xyz = xyz
-        surface_normals = normals
-
-    if verbose:
-        print(f"Adding {len(surface_xyz):,} surface points (phi=0)")
-
-    for i in range(len(surface_xyz)):
-        sample = TrainingSample(
-            x=float(surface_xyz[i, 0]),
-            y=float(surface_xyz[i, 1]),
-            z=float(surface_xyz[i, 2]),
-            phi=0.0,
-            weight=1.0,
-            source="surface",
-            is_surface=True,
-            is_free=False,
-        )
-        if surface_normals is not None:
-            sample.nx = float(surface_normals[i, 0])
-            sample.ny = float(surface_normals[i, 1])
-            sample.nz = float(surface_normals[i, 2])
-        samples.append(sample)
-
-    return samples
-
-
 def cmd_info(args: argparse.Namespace) -> int:
     """Show information about a file."""
     if not args.input.exists():
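Combined, the CLI changes let a sample run request an exact number of surface points and switch to area-weighted mode by pointing at the source mesh. A hypothetical invocation: the `sdf-sampler` executable name, the positional input argument, and the `--constraints`/`--total-samples` spellings are assumed from the command functions above, while `--include-surface-points`, `--surface-point-count`, and `--mesh` appear verbatim in this diff:

```
sdf-sampler sample scan.ply \
    --constraints constraints.json \
    --total-samples 50000 \
    --include-surface-points \
    --surface-point-count 2000 \
    --mesh scan.obj \
    --verbose
```
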
src/sdf_sampler/io.py

@@ -1,6 +1,7 @@
 # ABOUTME: I/O utilities for point cloud loading and sample export
 # ABOUTME: Supports PLY, LAS/LAZ, CSV, NPZ, and Parquet formats
 
+from dataclasses import dataclass
 from pathlib import Path
 from typing import Any
 
@@ -10,6 +11,79 @@ import pandas as pd
 from sdf_sampler.models.samples import TrainingSample
 
 
+@dataclass
+class Mesh:
+    """Triangle mesh with vertices, faces, and optional normals.
+
+    Used for area-weighted surface sampling where we need face information.
+    """
+
+    vertices: np.ndarray  # (N, 3) vertex positions
+    faces: np.ndarray  # (M, 3) triangle face indices
+    vertex_normals: np.ndarray | None = None  # (N, 3) per-vertex normals
+
+
+def load_mesh(
+    path: str | Path,
+    **kwargs: Any,
+) -> Mesh:
+    """Load mesh from file (preserves face information).
+
+    Use this instead of load_point_cloud() when you need area-weighted
+    surface sampling, which requires face information.
+
+    Supported formats:
+    - PLY (requires trimesh in [io] extras)
+    - OBJ (requires trimesh in [io] extras)
+    - STL (requires trimesh in [io] extras)
+    - OFF (requires trimesh in [io] extras)
+
+    Args:
+        path: Path to mesh file
+        **kwargs: Additional arguments for trimesh loader
+
+    Returns:
+        Mesh object with vertices, faces, and optional normals
+
+    Example:
+        >>> mesh = load_mesh("model.ply")
+        >>> print(f"Vertices: {len(mesh.vertices)}, Faces: {len(mesh.faces)}")
+    """
+    try:
+        import trimesh
+    except ImportError as e:
+        raise ImportError(
+            "trimesh is required for mesh loading. "
+            "Install with: pip install sdf-sampler[io]"
+        ) from e
+
+    path = Path(path)
+    loaded = trimesh.load(path, **kwargs)
+
+    # Handle Scene objects (multiple meshes)
+    if isinstance(loaded, trimesh.Scene):
+        # Combine all meshes into one
+        meshes = [g for g in loaded.geometry.values() if isinstance(g, trimesh.Trimesh)]
+        if not meshes:
+            raise ValueError(f"No triangle meshes found in {path}")
+        loaded = trimesh.util.concatenate(meshes)
+
+    if not isinstance(loaded, trimesh.Trimesh):
+        raise ValueError(
+            f"File {path} did not load as a triangle mesh. "
+            "Use load_point_cloud() for point cloud files."
+        )
+
+    vertices = np.asarray(loaded.vertices)
+    faces = np.asarray(loaded.faces)
+
+    vertex_normals = None
+    if loaded.vertex_normals is not None and len(loaded.vertex_normals) == len(vertices):
+        vertex_normals = np.asarray(loaded.vertex_normals)
+
+    return Mesh(vertices=vertices, faces=faces, vertex_normals=vertex_normals)
+
+
 def load_point_cloud(
     path: str | Path,
     **kwargs: Any,
@@ -37,8 +111,8 @@ def load_point_cloud(
     path = Path(path)
     suffix = path.suffix.lower()
 
-    if suffix == ".ply":
-        return
+    if suffix in (".ply", ".obj", ".stl", ".off"):
+        return _load_mesh_vertices(path, **kwargs)
     elif suffix in (".las", ".laz"):
         return _load_las(path, **kwargs)
     elif suffix == ".csv":
@@ -76,27 +150,33 @@ def export_parquet(
     return path
 
 
-def
-    """Load
+def _load_mesh_vertices(path: Path, **kwargs: Any) -> tuple[np.ndarray, np.ndarray | None]:
+    """Load mesh file using trimesh and return vertices."""
     try:
         import trimesh
     except ImportError as e:
         raise ImportError(
-            "trimesh is required for
+            "trimesh is required for mesh file support. "
             "Install with: pip install sdf-sampler[io]"
         ) from e
 
-
+    loaded = trimesh.load(path, **kwargs)
+
+    # Handle Scene objects (multiple meshes)
+    if isinstance(loaded, trimesh.Scene):
+        meshes = [g for g in loaded.geometry.values() if isinstance(g, trimesh.Trimesh)]
+        if meshes:
+            loaded = trimesh.util.concatenate(meshes)
 
     # Handle both PointCloud and Trimesh objects
-    if hasattr(
-        xyz = np.asarray(
+    if hasattr(loaded, "vertices"):
+        xyz = np.asarray(loaded.vertices)
     else:
-        xyz = np.asarray(
+        xyz = np.asarray(loaded.points if hasattr(loaded, "points") else loaded)
 
     normals = None
-    if hasattr(
-        normals = np.asarray(
+    if hasattr(loaded, "vertex_normals") and loaded.vertex_normals is not None:
+        normals = np.asarray(loaded.vertex_normals)
         if normals.shape != xyz.shape:
             normals = None
 
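The practical difference between the two loaders: `load_mesh()` preserves connectivity, which the area-weighted sampler needs, while `load_point_cloud()` flattens everything to point arrays. A small sketch contrasting them; `trimesh.creation.box` is used only as a convenient way to produce a file to load:

```python
import trimesh
from sdf_sampler import load_mesh, load_point_cloud

# Write a small test mesh to disk (illustration only)
trimesh.creation.box(extents=(1.0, 1.0, 1.0)).export("box.ply")

# load_mesh() keeps connectivity: vertices (N, 3) plus faces (M, 3)
mesh = load_mesh("box.ply")
print(mesh.vertices.shape, mesh.faces.shape, mesh.vertex_normals is not None)

# load_point_cloud() on the same file returns only point data
xyz, normals = load_point_cloud("box.ply")
assert xyz.shape == mesh.vertices.shape
```
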
src/sdf_sampler/models/analysis.py

@@ -26,6 +26,13 @@ ALL_ALGORITHMS = [
     AlgorithmType.NORMAL_IDW,
 ]
 
+# Default algorithms (excludes normal_idw which is opt-in)
+DEFAULT_ALGORITHMS = [
+    AlgorithmType.FLOOD_FILL,
+    AlgorithmType.VOXEL_REGIONS,
+    AlgorithmType.NORMAL_OFFSET,
+]
+
 
 class GeneratedConstraint(BaseModel):
     """A constraint generated by auto-analysis.
src/sdf_sampler/sampler.py

@@ -66,7 +66,8 @@ class SDFSampler:
         strategy: str | SamplingStrategy = SamplingStrategy.INVERSE_SQUARE,
         seed: int | None = None,
         include_surface_points: bool = False,
-
+        surface_point_count: int | None = None,
+        mesh: Any = None,
     ) -> list[TrainingSample]:
         """Generate training samples from constraints.
 
@@ -78,7 +79,9 @@ class SDFSampler:
             strategy: Sampling strategy (CONSTANT, DENSITY, or INVERSE_SQUARE)
             seed: Random seed for reproducibility
             include_surface_points: If True, include original surface points with phi=0
-
+            surface_point_count: Number of surface points to include (default: 1000, or len(xyz) if smaller)
+            mesh: Optional Mesh object for area-weighted surface sampling. If provided,
+                surface points are sampled uniformly by surface area instead of by vertex.
 
         Returns:
             List of TrainingSample objects
@@ -160,8 +163,10 @@ class SDFSampler:
 
         # Add surface points if requested
         if include_surface_points:
+            # Default to 1000 surface points, or all points if smaller
+            count = surface_point_count if surface_point_count is not None else min(1000, len(xyz))
            samples.extend(
-                self._generate_surface_points(xyz, normals,
+                self._generate_surface_points(xyz, normals, count, rng, mesh)
            )
 
        return samples
@@ -170,22 +175,32 @@ class SDFSampler:
         self,
         xyz: np.ndarray,
         normals: np.ndarray | None,
-
+        count: int,
         rng: np.random.Generator,
+        mesh: Any = None,
     ) -> list[TrainingSample]:
         """Generate surface point samples (phi=0) from the input point cloud.
 
         Args:
             xyz: Point cloud positions (N, 3)
             normals: Optional point normals (N, 3)
-
+            count: Number of surface points to include
             rng: Random number generator
+            mesh: Optional Mesh object for area-weighted sampling
 
         Returns:
             List of TrainingSample objects with phi=0
         """
-
-
+        if count <= 0:
+            return []
+
+        # Use area-weighted sampling if mesh is provided
+        if mesh is not None:
+            return self._generate_surface_points_area_weighted(mesh, count, rng)
+
+        # Fallback to vertex-based sampling
+        n_surface = min(count, len(xyz))
+        if n_surface <= 0:
             return []
 
         # Subsample if needed
@@ -217,6 +232,107 @@ class SDFSampler:
 
         return samples
 
+    def _generate_surface_points_area_weighted(
+        self,
+        mesh: Any,
+        count: int,
+        rng: np.random.Generator,
+    ) -> list[TrainingSample]:
+        """Generate surface points using area-weighted sampling.
+
+        Samples points uniformly by surface area, not by vertex count.
+        This ensures uniform coverage even when vertex density varies.
+
+        Args:
+            mesh: Mesh object with vertices, faces, and optional vertex_normals
+            count: Number of surface points to generate
+            rng: Random number generator
+
+        Returns:
+            List of TrainingSample objects with phi=0
+        """
+        vertices = mesh.vertices
+        faces = mesh.faces
+
+        # Compute face areas
+        v0 = vertices[faces[:, 0]]
+        v1 = vertices[faces[:, 1]]
+        v2 = vertices[faces[:, 2]]
+        face_areas = 0.5 * np.linalg.norm(np.cross(v1 - v0, v2 - v0), axis=1)
+
+        # Sample faces proportional to their area
+        total_area = face_areas.sum()
+        if total_area <= 0:
+            return []
+
+        face_probs = face_areas / total_area
+        sampled_faces = rng.choice(len(faces), size=count, p=face_probs)
+
+        # Sample random point within each selected face using barycentric coordinates
+        # Generate random barycentric coordinates
+        r1 = rng.random(count)
+        r2 = rng.random(count)
+        # Ensure uniform distribution within triangle
+        sqrt_r1 = np.sqrt(r1)
+        u = 1 - sqrt_r1
+        v = sqrt_r1 * (1 - r2)
+        w = sqrt_r1 * r2
+
+        # Get vertices for sampled faces
+        f_v0 = vertices[faces[sampled_faces, 0]]
+        f_v1 = vertices[faces[sampled_faces, 1]]
+        f_v2 = vertices[faces[sampled_faces, 2]]
+
+        # Compute sample positions
+        surface_xyz = (
+            u[:, np.newaxis] * f_v0 +
+            v[:, np.newaxis] * f_v1 +
+            w[:, np.newaxis] * f_v2
+        )
+
+        # Compute normals (interpolated from vertex normals if available, else face normals)
+        if mesh.vertex_normals is not None:
+            n0 = mesh.vertex_normals[faces[sampled_faces, 0]]
+            n1 = mesh.vertex_normals[faces[sampled_faces, 1]]
+            n2 = mesh.vertex_normals[faces[sampled_faces, 2]]
+            surface_normals = (
+                u[:, np.newaxis] * n0 +
+                v[:, np.newaxis] * n1 +
+                w[:, np.newaxis] * n2
+            )
+            # Normalize
+            norms = np.linalg.norm(surface_normals, axis=1, keepdims=True)
+            norms = np.where(norms > 0, norms, 1)
+            surface_normals = surface_normals / norms
+        else:
+            # Compute face normals
+            edge1 = f_v1 - f_v0
+            edge2 = f_v2 - f_v0
+            surface_normals = np.cross(edge1, edge2)
+            norms = np.linalg.norm(surface_normals, axis=1, keepdims=True)
+            norms = np.where(norms > 0, norms, 1)
+            surface_normals = surface_normals / norms
+
+        # Build samples
+        samples = []
+        for i in range(len(surface_xyz)):
+            sample = TrainingSample(
+                x=float(surface_xyz[i, 0]),
+                y=float(surface_xyz[i, 1]),
+                z=float(surface_xyz[i, 2]),
+                phi=0.0,
+                nx=float(surface_normals[i, 0]),
+                ny=float(surface_normals[i, 1]),
+                nz=float(surface_normals[i, 2]),
+                weight=1.0,
+                source="surface",
+                is_surface=True,
+                is_free=False,
+            )
+            samples.append(sample)
+
+        return samples
+
     def to_dataframe(self, samples: list[TrainingSample]) -> pd.DataFrame:
         """Convert samples to pandas DataFrame.
 
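The `sqrt_r1` warp in `_generate_surface_points_area_weighted` is the standard trick for sampling a triangle uniformly: with barycentric weights `u = 1 - sqrt(r1)`, `v = sqrt(r1) * (1 - r2)`, `w = sqrt(r1) * r2`, the points are uniform over the triangle's area, whereas using `(r1, r2)` directly would cluster samples near the first vertex. A self-contained check (not part of the package) that the empirical mean of such samples lands on the centroid:

```python
import numpy as np

rng = np.random.default_rng(42)

# One triangle; uniform samples should average to its centroid
tri = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])

n = 100_000
r1, r2 = rng.random(n), rng.random(n)

# The sqrt warp makes the barycentric weights uniform over area
sqrt_r1 = np.sqrt(r1)
u, v, w = 1 - sqrt_r1, sqrt_r1 * (1 - r2), sqrt_r1 * r2

pts = u[:, None] * tri[0] + v[:, None] * tri[1] + w[:, None] * tri[2]

centroid = tri.mean(axis=0)
assert np.allclose(pts.mean(axis=0), centroid, atol=1e-2)
```
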
tests/test_equivalence.py

@@ -759,3 +759,120 @@ class TestFullPipelineEquivalence:
                 f"Sample count ratio too high: {ratio:.2f} "
                 f"(standalone={standalone_sample_count}, backend={backend_sample_count})"
             )
+
+    @requires_backend
+    def test_inverse_square_pipeline_equivalence(self, trench_pointcloud):
+        """Test inverse_square sampling produces equivalent results.
+
+        This is the recommended production workflow: auto-analyze + inverse_square sampling.
+        """
+        xyz, normals = trench_pointcloud
+
+        # Shared analysis options
+        analysis_options = AutoAnalysisOptions(
+            flood_fill_output="samples",
+            flood_fill_sample_count=100,
+            voxel_regions_output="samples",
+            voxel_regions_sample_count=100,
+            idw_sample_count=100,
+            hull_filter_enabled=False,
+        )
+
+        # Run standalone with inverse_square
+        standalone_analyzer = SDFAnalyzer()
+        standalone_result = standalone_analyzer.analyze(
+            xyz=xyz,
+            normals=normals,
+            algorithms=["flood_fill", "voxel_regions", "normal_idw"],
+            options=analysis_options,
+        )
+
+        standalone_sampler = SDFSampler()
+        standalone_samples = standalone_sampler.generate(
+            xyz=xyz,
+            normals=normals,
+            constraints=standalone_result.constraints,
+            strategy="inverse_square",
+            total_samples=5000,
+            seed=42,
+            include_surface_points=False,  # Test without surface points first
+        )
+
+        # Run backend with inverse_square
+        from sdf_labeler_api.config import Settings
+        from sdf_labeler_api.services.auto_analysis_service import AutoAnalysisService
+        from sdf_labeler_api.services.sampling_service import SamplingService
+        from sdf_labeler_api.services.project_service import ProjectService
+        from sdf_labeler_api.services.constraint_service import ConstraintService
+        from sdf_labeler_api.models.project import ProjectCreate
+        from sdf_labeler_api.models.samples import SampleGenerationRequest, SamplingStrategy
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            data_dir = Path(tmpdir)
+
+            import sdf_labeler_api.config as backend_config
+            original_settings = backend_config.settings
+            backend_config.settings = Settings(data_dir=data_dir)
+
+            try:
+                project_service = ProjectService(data_dir)
+                project = project_service.create(ProjectCreate(name="test"))
+                project_id = project.id
+
+                setup_backend_project(data_dir, project_id, xyz, normals)
+
+                # Analyze
+                backend_analysis = AutoAnalysisService(backend_config.settings)
+                backend_options = get_backend_options(analysis_options)
+                backend_result = asyncio.run(backend_analysis.analyze(
+                    project_id=project_id,
+                    algorithms=["flood_fill", "voxel_regions", "normal_idw"],
+                    recompute=True,
+                    options=backend_options,
+                ))
+
+                # Add constraints to project
+                constraint_service = ConstraintService()
+                for gc in backend_result.generated_constraints:
+                    constraint_service.add_from_dict(project_id, gc.constraint)
+
+                # Sample with inverse_square
+                sampling_service = SamplingService()
+                request = SampleGenerationRequest(
+                    total_samples=5000,
+                    strategy=SamplingStrategy.INVERSE_SQUARE,
+                    seed=42,
+                )
+                backend_sample_result = sampling_service.generate(project_id, request)
+                backend_samples = backend_sample_result.samples
+            finally:
+                backend_config.settings = original_settings
+
+        # Compare results
+        print(f"\nInverse square pipeline comparison:")
+        print(f"  Standalone constraints: {len(standalone_result.constraints)}")
+        print(f"  Backend constraints: {len(backend_result.generated_constraints)}")
+        print(f"  Standalone samples: {len(standalone_samples)}")
+        print(f"  Backend samples: {len(backend_samples)}")
+
+        # Verify phi distribution is similar (more samples near 0)
+        standalone_near_surface = sum(1 for s in standalone_samples if abs(s.phi) < 0.1)
+        backend_near_surface = sum(1 for s in backend_samples if abs(s.phi) < 0.1)
+
+        print(f"  Standalone near-surface (|phi|<0.1): {standalone_near_surface}")
+        print(f"  Backend near-surface (|phi|<0.1): {backend_near_surface}")
+
+        # Both should have majority of samples near surface (inverse_square characteristic)
+        standalone_ratio = standalone_near_surface / len(standalone_samples) if standalone_samples else 0
+        backend_ratio = backend_near_surface / len(backend_samples) if backend_samples else 0
+
+        assert standalone_ratio > 0.3, f"Standalone should have >30% near-surface, got {standalone_ratio:.1%}"
+        assert backend_ratio > 0.3, f"Backend should have >30% near-surface, got {backend_ratio:.1%}"
+
+        # Ratios should be similar
+        if standalone_ratio > 0 and backend_ratio > 0:
+            ratio_diff = abs(standalone_ratio - backend_ratio)
+            assert ratio_diff < 0.2, (
+                f"Near-surface ratio difference too high: {ratio_diff:.1%} "
+                f"(standalone={standalone_ratio:.1%}, backend={backend_ratio:.1%})"
+            )