py4dgeo-0.7.0-cp39-cp39-win_amd64.whl → py4dgeo-0.8.0-cp39-cp39-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- _py4dgeo.cp39-win_amd64.pyd +0 -0
- py4dgeo/__init__.py +34 -0
- py4dgeo/epoch.py +79 -28
- py4dgeo/fallback.py +5 -5
- py4dgeo/m3c2.py +12 -9
- py4dgeo/m3c2ep.py +5 -4
- py4dgeo/pbm3c2.py +14 -14
- py4dgeo/py4dgeo_python.cpp +289 -18
- py4dgeo/segmentation.py +15 -7
- py4dgeo/util.py +25 -0
- py4dgeo-0.8.0.data/platlib/.load-order-py4dgeo-0.8.0 +2 -0
- py4dgeo-0.8.0.data/platlib/libomp-a12116ba72d1d6820407cf30be23da04.dll +0 -0
- py4dgeo-0.8.0.data/platlib/msvcp140-a4c2229bdc2a2a630acdc095b4d86008.dll +0 -0
- py4dgeo-0.8.0.dist-info/DELVEWHEEL +2 -0
- {py4dgeo-0.7.0.dist-info → py4dgeo-0.8.0.dist-info}/METADATA +15 -7
- py4dgeo-0.8.0.dist-info/RECORD +24 -0
- {py4dgeo-0.7.0.dist-info → py4dgeo-0.8.0.dist-info}/WHEEL +1 -1
- py4dgeo-0.7.0.dist-info/RECORD +0 -20
- {py4dgeo-0.7.0.dist-info → py4dgeo-0.8.0.dist-info}/entry_points.txt +0 -0
- {py4dgeo-0.7.0.dist-info → py4dgeo-0.8.0.dist-info}/licenses/COPYING.md +0 -0
- {py4dgeo-0.7.0.dist-info → py4dgeo-0.8.0.dist-info}/licenses/LICENSE.md +0 -0
_py4dgeo.cp39-win_amd64.pyd
CHANGED
Binary file
py4dgeo/__init__.py
CHANGED
@@ -1,3 +1,33 @@
+"""""" # start delvewheel patch
+def _delvewheel_patch_1_11_1():
+    import ctypes
+    import os
+    import platform
+    import sys
+    libs_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, '.'))
+    is_conda_cpython = platform.python_implementation() == 'CPython' and (hasattr(ctypes.pythonapi, 'Anaconda_GetVersion') or 'packaged by conda-forge' in sys.version)
+    if sys.version_info[:2] >= (3, 8) and not is_conda_cpython or sys.version_info[:2] >= (3, 10):
+        if os.path.isdir(libs_dir):
+            os.add_dll_directory(libs_dir)
+    else:
+        load_order_filepath = os.path.join(libs_dir, '.load-order-py4dgeo-0.8.0')
+        if os.path.isfile(load_order_filepath):
+            import ctypes.wintypes
+            with open(os.path.join(libs_dir, '.load-order-py4dgeo-0.8.0')) as file:
+                load_order = file.read().split()
+            kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)
+            kernel32.LoadLibraryExW.restype = ctypes.wintypes.HMODULE
+            kernel32.LoadLibraryExW.argtypes = ctypes.wintypes.LPCWSTR, ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD
+            for lib in load_order:
+                lib_path = os.path.join(os.path.join(libs_dir, lib))
+                if os.path.isfile(lib_path) and not kernel32.LoadLibraryExW(lib_path, None, 8):
+                    raise OSError('Error loading {}; {}'.format(lib, ctypes.FormatError(ctypes.get_last_error())))
+
+
+_delvewheel_patch_1_11_1()
+del _delvewheel_patch_1_11_1
+# end delvewheel patch
+
 from py4dgeo.logger import set_py4dgeo_logfile
 from py4dgeo.cloudcompare import CloudCompareM3C2
 from py4dgeo.epoch import (
@@ -7,6 +37,7 @@ from py4dgeo.epoch import (
     save_epoch,
     load_epoch,
 )
+from _py4dgeo import SearchTree
 from py4dgeo.m3c2 import M3C2, write_m3c2_results_to_las
 from py4dgeo.m3c2ep import M3C2EP
 from py4dgeo.registration import (
@@ -27,6 +58,9 @@ from py4dgeo.util import (
     set_memory_policy,
     get_num_threads,
     set_num_threads,
+    initialize_openmp_defaults,
 )
 
+initialize_openmp_defaults()
+
 from py4dgeo.pbm3c2 import *
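Editor's note: the additions in this hunk are visible at import time. The following is a hedged sketch (not part of the package) that only exercises the names introduced here, `SearchTree` and `initialize_openmp_defaults`; everything else about importing py4dgeo is unchanged.

```python
# Hedged sketch of the import-time additions above; nothing here ships with
# the package, it only touches names introduced in this hunk.
import os

import py4dgeo

# initialize_openmp_defaults() already ran during import; on Windows it seeds
# OMP_NUM_THREADS with the physical core count unless the user set it first.
print(os.environ.get("OMP_NUM_THREADS"))

# The SearchTree enum is re-exported from the compiled _py4dgeo module and
# names the two radius-search backends available in 0.8.0.
print(list(py4dgeo.SearchTree.__members__))  # ['KDTreeSearch', 'OctreeSearch']
```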
py4dgeo/epoch.py
CHANGED
@@ -115,6 +115,16 @@ class Epoch(_py4dgeo.Epoch):
             "The KDTree of an Epoch cannot be changed after initialization."
         )
 
+    @property
+    def octree(self):
+        return self._octree
+
+    @octree.setter
+    def octree(self, octree):
+        raise Py4DGeoError(
+            "The Octree of an Epoch cannot be changed after initialization."
+        )
+
     @property
     def normals(self):
         # Maybe calculate normals
@@ -137,9 +147,7 @@ class Epoch(_py4dgeo.Epoch):
             A vector to determine orientation of the normals. It should point "up".
         """
 
-
-        if self.kdtree.leaf_parameter() == 0:
-            self.build_kdtree()
+        self._validate_search_tree()
 
         # Reuse the multiscale code with a single radius in order to
         # avoid code duplication.
@@ -153,6 +161,18 @@ class Epoch(_py4dgeo.Epoch):
 
         return self.normals
 
+    def _validate_search_tree(self):
+        """ "Check if the default search tree is built"""
+
+        tree_type = self.get_default_radius_search_tree()
+
+        if tree_type == _py4dgeo.SearchTree.KDTreeSearch:
+            if self.kdtree.leaf_parameter() == 0:
+                self.build_kdtree()
+        else:
+            if self.octree.get_number_of_points() == 0:
+                self.build_octree()
+
     def normals_attachment(self, normals_array):
         """Attach normals to the epoch object
 
@@ -282,6 +302,12 @@ class Epoch(_py4dgeo.Epoch):
         logger.info(f"Building KDTree structure with leaf parameter {leaf_size}")
         self.kdtree.build_tree(leaf_size)
 
+    def build_octree(self):
+        """Build the search octree index"""
+        if self.octree.get_number_of_points() == 0:
+            logger.info(f"Building Octree structure")
+            self.octree.build_tree()
+
     def transform(
         self,
         transformation: typing.Optional[Transformation] = None,
@@ -343,6 +369,9 @@ class Epoch(_py4dgeo.Epoch):
         # Invalidate the KDTree
         self.kdtree.invalidate()
 
+        # Invalidate the Octree
+        self.octree.invalidate()
+
         if self._normals is None:
             self._normals = np.empty((1, 3))  # dummy array to avoid error in C++ code
         # Apply the actual transformation as efficient C++
@@ -453,6 +482,11 @@ class Epoch(_py4dgeo.Epoch):
             self.kdtree.save_index(kdtreefile)
             zf.write(kdtreefile, arcname="kdtree")
 
+            octreefile = os.path.join(tmp_dir, "octree")
+            with open(octreefile, "w") as f:
+                self.octree.save_index(octreefile)
+            zf.write(octreefile, arcname="octree")
+
     @staticmethod
     def load(filename):
         """Construct an Epoch instance by loading it from a file
@@ -499,6 +533,15 @@ class Epoch(_py4dgeo.Epoch):
             kdtreefile = zf.extract("kdtree", path=tmp_dir)
             epoch.kdtree.load_index(kdtreefile)
 
+            # Restore the Octree object if present
+            try:
+                octreefile = zf.extract("octree", path=tmp_dir)
+                epoch.octree.load_index(octreefile)
+            except KeyError:
+                logger.warning(
+                    "No octree found in the archive. Skipping octree loading."
+                )
+
             # Read the transformation if it exists
             if version >= 3:
                 trafofile = zf.extract("trafo.json", path=tmp_dir)
@@ -578,6 +621,7 @@ def read_from_xyz(
     xyz_columns=[0, 1, 2],
     normal_columns=[],
     additional_dimensions={},
+    additional_dimensions_dtypes={},
    **parse_opts,
 ):
     """Create an epoch from an xyz file
@@ -603,6 +647,11 @@ def read_from_xyz(
         They will be read from the file and are accessible under their names from the
         created Epoch objects.
         Additional column indexes start with 3.
+    :type additional_dimensions: dict
+    :param additional_dimensions_dtypes:
+        A dictionary, mapping column names to numpy dtypes which should be used
+        in parsing the data.
+    :type additional_dimensions_dtypes: dict
     :type parse_opts: dict
     """
 
@@ -633,32 +682,29 @@ def read_from_xyz(
 
     try:
         normals = np.genfromtxt(
-            filename,
+            filename,
+            dtype=np.float64,
+            usecols=normal_columns,
+            **parse_opts,
         )
     except ValueError:
         raise Py4DGeoError("Malformed XYZ file")
 
     # Potentially read additional_dimensions passed by the user
-
-
-
-
-
-
-
-
-
+    if additional_dimensions:
+        additional_columns = np.genfromtxt(
+            filename,
+            dtype=np.dtype(
+                [
+                    (name, additional_dimensions_dtypes.get(name, np.float64))
+                    for name in additional_dimensions.values()
+                ]
+            ),
+            usecols=additional_dimensions.keys(),
+            **parse_opts,
         )
-
-
-            parsed_additionals = parsed_additionals.reshape(-1, 1)
-    except ValueError:
-        raise Py4DGeoError("Malformed XYZ file")
-
-    for i, col in enumerate(add_cols):
-        additional_columns[additional_dimensions[col]] = parsed_additionals[
-            :, i
-        ].reshape(-1, 1)
+    else:
+        additional_columns = np.empty(shape=(cloud.shape[0], 1), dtype=[])
 
     # Finalize the construction of the new epoch
     new_epoch = Epoch(cloud, normals=normals, additional_dimensions=additional_columns)
@@ -675,6 +721,7 @@ def read_from_xyz(
                 xyz_columns=xyz_columns,
                 normal_columns=normal_columns,
                 additional_dimensions=additional_dimensions,
+                additional_dimensions_dtypes=additional_dimensions_dtypes,
                 **parse_opts,
             )
         )
@@ -725,16 +772,20 @@ def read_from_las(*filenames, normal_columns=[], additional_dimensions={}):
         ]
     ).transpose()
 
-    # set scan positions
     # build additional_dimensions dtype structure
     additional_columns = np.empty(
         shape=(cloud.shape[0], 1),
-        dtype=np.dtype(
+        dtype=np.dtype(
+            [
+                (column_name, lasfile.points[column_id].dtype)
+                for column_id, column_name in additional_dimensions.items()
+            ]
+        ),
     )
+
+    # and fill it with the data from the lasfile
     for column_id, column_name in additional_dimensions.items():
-        additional_columns[column_name] =
-            lasfile.points[column_id], dtype=np.int32
-        ).reshape(-1, 1)
+        additional_columns[column_name] = lasfile.points[column_id].reshape(-1, 1)
 
     # Construct Epoch and go into recursion
     new_epoch = Epoch(
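Editor's note: as orientation for the hunks above, here is a hedged sketch (not taken from the package) of the Epoch-level octree surface they add; the random point cloud is purely illustrative.

```python
# Hedged sketch exercising the octree additions to Epoch shown above; the
# random cloud is illustrative only.
import numpy as np
import py4dgeo

epoch = py4dgeo.Epoch(np.random.rand(1000, 3) * 10.0)

# Both acceleration structures can now be built explicitly; the internal
# _validate_search_tree() builds whichever backend is currently the default.
epoch.build_kdtree()
epoch.build_octree()

# The octree sits next to the existing kdtree as a read-only property.
print(epoch.kdtree.leaf_parameter(), epoch.octree.get_number_of_points())
```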
py4dgeo/fallback.py
CHANGED
@@ -7,7 +7,7 @@ import _py4dgeo
 
 
 def radius_workingset_finder(params: _py4dgeo.WorkingSetFinderParameters) -> np.ndarray:
-    indices = params.epoch.
+    indices = params.epoch._radius_search(params.corepoint, params.radius)
     return params.epoch._cloud[indices, :]
 
 
@@ -35,7 +35,7 @@ def cylinder_workingset_finder(
             params.corepoint[0, :]
             + (2 * i - N + 1) / N * max_cylinder_length * params.cylinder_axis[0, :]
         )
-        indices = params.epoch.
+        indices = params.epoch._radius_search(qp, r_cyl)
 
         # Gather the points from the point cloud
         superset = params.epoch._cloud[indices, :]
@@ -85,12 +85,12 @@ def mean_stddev_distance(
             np.sqrt(
                 variance1 / params.workingset1.shape[0]
                 + variance2 / params.workingset2.shape[0]
-            )
+            ).item()
             + params.registration_error
         ),
-        spread1=np.sqrt(variance1),
+        spread1=np.sqrt(variance1).item(),
         num_samples1=params.workingset1.shape[0],
-        spread2=np.sqrt(variance2),
+        spread2=np.sqrt(variance2).item(),
         num_samples2=params.workingset2.shape[0],
     )
 
py4dgeo/m3c2.py
CHANGED
@@ -72,7 +72,9 @@ class M3C2LikeAlgorithm(abc.ABC):
         """The normal direction(s) to use for this algorithm."""
         raise NotImplementedError
 
-    def calculate_distances(
+    def calculate_distances(
+        self, epoch1, epoch2, searchtree: typing.Optional[str] = None
+    ):
         """Calculate the distances between two epochs"""
 
         if isinstance(self.cyl_radii, typing.Iterable):
@@ -80,7 +82,9 @@ class M3C2LikeAlgorithm(abc.ABC):
                 "DEPRECATION: use cyl_radius instead of cyl_radii. In a future version, cyl_radii will be removed!"
             )
             if len(self.cyl_radii) != 1:
-                Py4DGeoError(
+                raise Py4DGeoError(
+                    "cyl_radii must be a list containing a single float!"
+                )
             elif self.cyl_radius is None:
                 self.cyl_radius = self.cyl_radii[0]
                 self.cyl_radii = None
@@ -90,11 +94,9 @@ class M3C2LikeAlgorithm(abc.ABC):
                 f"{self.name} requires exactly one cylinder radius to be given as a float."
             )
 
-        # Ensure
-
-
-        epoch1.build_kdtree()
-        epoch2.build_kdtree()
+        # Ensure appropriate trees are built
+        epoch1._validate_search_tree()
+        epoch2._validate_search_tree()
 
         distances, uncertainties = _py4dgeo.compute_distances(
             self.corepoints,
@@ -174,8 +176,9 @@ class M3C2(M3C2LikeAlgorithm):
         if normals_epoch is None:
             normals_epoch = self.epochs[0]
         normals_epoch = as_epoch(normals_epoch)
-
-
+
+        # Ensure appropriate tree structures have been built
+        normals_epoch._validate_search_tree()
 
         # Trigger the precomputation
         self.corepoint_normals, self._directions_radii = (
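Editor's note: a hedged sketch of a standard M3C2 run against the reworked tree handling follows; the file names are placeholders. `calculate_distances()` now validates the search tree of both epochs instead of unconditionally building KDTrees.

```python
# Hedged sketch; the .xyz file names are placeholders, the rest follows the
# documented M3C2 workflow which this diff leaves unchanged from the outside.
import py4dgeo

epoch1, epoch2 = py4dgeo.read_from_xyz("epoch1.xyz", "epoch2.xyz")

m3c2 = py4dgeo.M3C2(
    epochs=(epoch1, epoch2),
    corepoints=epoch1.cloud[::100],
    cyl_radius=2.0,
    normal_radii=[0.5, 1.0, 2.0],
)
# run() internally calls calculate_distances(), which now builds whichever
# search tree backend is currently the default for each epoch.
distances, uncertainties = m3c2.run()
```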
py4dgeo/m3c2ep.py
CHANGED
@@ -61,8 +61,9 @@ class M3C2EP(M3C2):
                 f"{self.name} requires exactly one cylinder radius to be given"
             )
 
-
-
+        # Ensure appropriate trees are built
+        epoch1._validate_search_tree()
+        epoch2._validate_search_tree()
 
         p1_coords = epoch1.cloud
         p1_positions = epoch1.scanpos_id
@@ -837,13 +838,13 @@ def radius_search(epoch: Epoch, query: np.ndarray, radius: float):
     :type radius: float
     """
     if len(query.shape) == 1 and query.shape[0] == 3:
-        return [epoch.
+        return [epoch._radius_search(query, radius)]
 
     if len(query.shape) == 2 and query.shape[1] == 3:
         neighbors = []
         for i in range(query.shape[0]):
             q = query[i]
-            result = epoch.
+            result = epoch._radius_search(q, radius)
             neighbors.append(result)
         return neighbors
 
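Editor's note: a hedged sketch of the module-level `radius_search` helper touched above; the cloud and radius are illustrative, and the two calls mirror the two accepted query shapes, a single `(3,)` point and an `(N, 3)` batch.

```python
# Hedged sketch; assumes radius_search is importable from py4dgeo.m3c2ep as
# shown in the hunk above, the point cloud is illustrative only.
import numpy as np
import py4dgeo
from py4dgeo.m3c2ep import radius_search

epoch = py4dgeo.Epoch(np.random.rand(500, 3))

single = radius_search(epoch, np.array([0.5, 0.5, 0.5]), 0.2)  # list with one index array
batch = radius_search(epoch, np.random.rand(10, 3), 0.2)       # one index array per query
print(len(single), len(batch))
```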
py4dgeo/pbm3c2.py
CHANGED
@@ -182,7 +182,7 @@ def _extract_from_additional_dimensions(
     for column in column_names:
         if column in epoch.additional_dimensions.dtype.names:
             result = np.concatenate(
-                (result, epoch.additional_dimensions[column]), axis=1
+                (result, epoch.additional_dimensions[column].reshape(-1, 1)), axis=1
             )
         else:
             logger.debug(
@@ -572,19 +572,19 @@ def generate_extended_y_from_prior_knowledge(
 
     # generate kd-tree for each of the 2 sets
     epoch0 = Epoch(epoch0_set.T)
-    epoch0.
+    epoch0._validate_search_tree()
     epoch1 = Epoch(epoch1_set.T)
-    epoch1.
+    epoch1._validate_search_tree()
 
     # search for the near segments and build the 'extended y'
     for row in pairs_of_points:
         seg_epoch0, seg_epoch1, label = np.split(ary=row, indices_or_sections=[3, 6])
         label = label[0]
 
-        candidates_seg_epoch0 = epoch0.
+        candidates_seg_epoch0 = epoch0._radius_search(
             seg_epoch0, threshold_max_distance
         )
-        candidates_seg_epoch1 = epoch1.
+        candidates_seg_epoch1 = epoch1._radius_search(
             seg_epoch1, threshold_max_distance
         )
 
@@ -675,16 +675,16 @@ def add_no_corresponding_seg(
     epoch1_set = segments[epoch1_mask][X_Y_Z_Columns]
 
     epoch0_set = epoch0_set.T
-    # generate
+    # generate search tree
     epoch1 = Epoch(epoch1_set.T)
-    epoch1.
+    epoch1._validate_search_tree()
 
     # search for the near segments and build the 'extended y'
     # for row in pairs_of_points:
     for index, row in enumerate(epoch0_set):
         index_seg_epoch0 = epoch0_index[index]
 
-        candidates_seg_epoch1 = epoch1.
+        candidates_seg_epoch1 = epoch1._radius_search(row, threshold_max_distance)
 
         indexes_seg_epoch1 = epoch1_index[candidates_seg_epoch1]
 
@@ -925,7 +925,7 @@ class PerPointComputation(BaseTransformer):
         ]
 
         for current_epoch in _epoch:
-            current_epoch.
+            current_epoch._validate_search_tree()
 
         # add extra columns
         # Eigenvalues( 3 columns ) |
@@ -940,7 +940,7 @@ class PerPointComputation(BaseTransformer):
             lambda x: self._llsv_and_pca(
                 x,
                 _epoch[int(x[self.columns.EPOCH_ID_COLUMN])].cloud[
-                    _epoch[int(x[self.columns.EPOCH_ID_COLUMN])].
+                    _epoch[int(x[self.columns.EPOCH_ID_COLUMN])]._radius_search(
                         x[X_Y_Z_Columns], self.radius
                    )
                 ],
@@ -1201,7 +1201,7 @@ class Segmentation(BaseTransformer):
         ]
 
         for current_epoch in _epoch:
-            current_epoch.
+            current_epoch._validate_search_tree()
 
         # sort by the "Lowest local surface variation"
         sort_indx_epoch0 = X[mask_epoch0, self.columns.LLSV_COLUMN].argsort()
@@ -1224,7 +1224,7 @@ class Segmentation(BaseTransformer):
             cumulative_distance_for_std_deviation = 0
             nr_points_for_std_deviation = 0
 
-            indx_kd_tree_list = _epoch[epoch_id].
+            indx_kd_tree_list = _epoch[epoch_id]._radius_search(
                 X[indx_row, X_Y_Z_Columns], self.radius
             )[: self.max_nr_points_neighborhood]
             for indx_kd_tree in indx_kd_tree_list:
@@ -1975,13 +1975,13 @@ class ClassifierWrapper(ClassifierMixin, BaseEstimator):
             :, [self.columns.X_COLUMN, self.columns.Y_COLUMN, self.columns.Z_COLUMN]
         ]
         )
-        self.epoch1_segments.
+        self.epoch1_segments._validate_search_tree()
 
         list_segments_pair = np.empty((0, epoch0_set.shape[1] + epoch1_set.shape[1]))
 
         # this operation can be parallelized
         for epoch0_set_row in epoch0_set:
-            list_candidates = self.epoch1_segments.
+            list_candidates = self.epoch1_segments._radius_search(
                 epoch0_set_row, self.neighborhood_search_radius
             )
 
py4dgeo/py4dgeo_python.cpp
CHANGED
@@ -8,18 +8,27 @@
 #include <omp.h>
 #endif
 
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-
+#include <py4dgeo/compute.hpp>
+#include <py4dgeo/epoch.hpp>
+#include <py4dgeo/kdtree.hpp>
+#include <py4dgeo/octree.hpp>
+#include <py4dgeo/py4dgeo.hpp>
+#include <py4dgeo/pybind11_numpy_interop.hpp>
+#include <py4dgeo/registration.hpp>
+#include <py4dgeo/searchtree.hpp>
+#include <py4dgeo/segmentation.hpp>
+
+#include <algorithm>
+#include <cstddef>
 #include <fstream>
+#include <ios>
+#include <optional>
 #include <sstream>
+#include <stdexcept>
 #include <string>
 #include <tuple>
+#include <utility>
+#include <vector>
 
 namespace py = pybind11;
 
@@ -37,6 +46,12 @@ PYBIND11_MODULE(_py4dgeo, m)
     .value("RELAXED", MemoryPolicy::RELAXED)
     .export_values();
 
+  // The enum class for the type of search tree
+  py::enum_<SearchTree>(m, "SearchTree")
+    .value("KDTreeSearch", SearchTree::KDTree)
+    .value("OctreeSearch", SearchTree::Octree)
+    .export_values();
+
   // Register a numpy structured type for uncertainty calculation. This allows
   // us to allocate memory in C++ and expose it as a structured numpy array in
   // Python. The given names will be usable in Python.
@@ -64,9 +79,86 @@ PYBIND11_MODULE(_py4dgeo, m)
   // garbage collected as long as the Epoch object is alive
   epoch.def(py::init<EigenPointCloudRef>(), py::keep_alive<1, 2>());
 
-  // We can directly access the point cloud and the
+  // We can directly access the point cloud, the kdtree and the octree
   epoch.def_readwrite("_cloud", &Epoch::cloud);
   epoch.def_readwrite("_kdtree", &Epoch::kdtree);
+  epoch.def_readwrite("_octree", &Epoch::octree);
+
+  epoch.def(
+    "_radius_search",
+    [](Epoch& self, py::array_t<double> qp, double radius) {
+      // Ensure appropriate search tree has been built
+      if (Epoch::get_default_radius_search_tree() == SearchTree::KDTree) {
+        if (self.kdtree.get_leaf_parameter() == 0) {
+          self.kdtree.build_tree(10);
+        }
+      } else {
+        if (self.octree.get_number_of_points() == 0) {
+          self.octree.build_tree();
+        }
+      }
+
+      // Get a pointer for the query point
+      auto ptr = static_cast<const double*>(qp.request().ptr);
+
+      // Now perform the radius search
+      RadiusSearchResult result;
+      auto radius_search_func = get_radius_search_function(self, radius);
+      Eigen::Vector3d query_point(ptr[0], ptr[1], ptr[2]);
+      radius_search_func(query_point, result);
+
+      return as_pyarray(std::move(result));
+    },
+    py::arg("query_point"),
+    py::arg("radius"),
+    "Perform a radius search");
+
+  // Set and get default search trees
+  epoch.def_static(
+    "set_default_radius_search_tree",
+    [](const std::string& tree_name_input) {
+      std::string tree_name = tree_name_input;
+      std::transform(
+        tree_name.begin(), tree_name.end(), tree_name.begin(), ::tolower);
+
+      if (tree_name == "kdtree") {
+        Epoch::set_default_radius_search_tree(SearchTree::KDTree);
+      } else if (tree_name == "octree") {
+        Epoch::set_default_radius_search_tree(SearchTree::Octree);
+      } else {
+        throw std::invalid_argument("Unknown search tree type: " +
+                                    tree_name_input);
+      }
+    },
+    py::arg("tree_name"),
+    "Set the default search tree for radius searches (\"kdtree\" or "
+    "\"octree\")");
+
+  epoch.def_static(
+    "set_default_nearest_neighbor_tree",
+    [](const std::string& tree_name_input) {
+      std::string tree_name = tree_name_input;
+      std::transform(
+        tree_name.begin(), tree_name.end(), tree_name.begin(), ::tolower);
+
+      if (tree_name == "kdtree") {
+        Epoch::set_default_nearest_neighbor_tree(SearchTree::KDTree);
+      } else if (tree_name == "octree") {
+        Epoch::set_default_nearest_neighbor_tree(SearchTree::Octree);
+      } else {
+        throw std::invalid_argument("Unknown search tree type: " +
                                    tree_name_input);
+      }
+    },
+    py::arg("tree_name"),
+    "Set the default search tree for nearest neighbor searches (\"kdtree\" or "
+    "\"octree\")");
+
+  epoch.def_static("get_default_radius_search_tree",
+                   &Epoch::get_default_radius_search_tree);
+
+  epoch.def_static("get_default_nearest_neighbor_tree",
+                   &Epoch::get_default_nearest_neighbor_tree);
 
   // Pickling support for the Epoch class
   epoch.def(py::pickle(
@@ -101,15 +193,17 @@ PYBIND11_MODULE(_py4dgeo, m)
 
   // Allow building the KDTree structure
   kdtree.def(
-    "build_tree", &KDTree::build_tree, "Trigger building the search tree");
+    "build_tree", &KDTree::build_tree, "Trigger building the search k-d tree");
 
   // Allow invalidating the KDTree structure
-  kdtree.def(
+  kdtree.def(
+    "invalidate", &KDTree::invalidate, "Invalidate the search k-d tree");
 
-  // Give access to the leaf parameter that the tree has been built with
-  kdtree.def(
-
-
+  // Give access to the leaf parameter that the k-d tree has been built with
+  kdtree.def(
+    "leaf_parameter",
+    &KDTree::get_leaf_parameter,
+    "Retrieve the leaf parameter that the k-d tree has been built with.");
 
   // Add all the radius search methods
   kdtree.def(
@@ -118,7 +212,7 @@ PYBIND11_MODULE(_py4dgeo, m)
       // Get a pointer for the query point
       auto ptr = static_cast<const double*>(qp.request().ptr);
 
-
+      RadiusSearchResult result;
       self.radius_search(ptr, radius, result);
 
       return as_pyarray(std::move(result));
@@ -128,7 +222,7 @@ PYBIND11_MODULE(_py4dgeo, m)
   kdtree.def(
     "nearest_neighbors",
     [](const KDTree& self, EigenPointCloudConstRef cloud, int k) {
-
+      NearestNeighborsDistanceResult result;
       self.nearest_neighbors_with_distances(cloud, result, k);
 
       py::array_t<long int> indices_array(result.size());
@@ -137,7 +231,7 @@ PYBIND11_MODULE(_py4dgeo, m)
       auto indices_array_ptr = indices_array.mutable_data();
       auto distances_array_ptr = distances_array.mutable_data();
 
-      for (size_t i = 0; i < result.size(); ++i) {
+      for (std::size_t i = 0; i < result.size(); ++i) {
         *indices_array_ptr++ = result[i].first[result[i].first.size() - 1];
         *distances_array_ptr++ = result[i].second[result[i].second.size() - 1];
       }
@@ -158,6 +252,179 @@ PYBIND11_MODULE(_py4dgeo, m)
     };
   });
 
+  // Expose the Octree class
+  py::class_<Octree> octree(m, "Octree", py::buffer_protocol());
+
+  // Map __init__ to constructor
+  octree.def(py::init<>(&Octree::create));
+
+  // Allow updating Octree from a given file
+  octree.def("load_index", [](Octree& self, std::string filename) {
+    std::ifstream stream(filename, std::ios::binary | std::ios::in);
+    self.loadIndex(stream);
+  });
+
+  // Allow dumping Octree to a file
+  octree.def("save_index", [](const Octree& self, std::string filename) {
+    std::ofstream stream(filename, std::ios::binary | std::ios::out);
+    self.saveIndex(stream);
+  });
+
+  // Allow building the Octree structure
+  octree.def("build_tree",
+             &Octree::build_tree,
+             py::arg("force_cubic") = false,
+             py::arg("min_corner") = std::nullopt,
+             py::arg("max_corner") = std::nullopt,
+             "Trigger building the search octree");
+
+  // Allow invalidating the Octree structure
+  octree.def("invalidate", &Octree::invalidate, "Invalidate the search octree");
+
+  // Allow extraction of number of points
+  octree.def("get_number_of_points",
+             &Octree::get_number_of_points,
+             "Return the number of points in the associated cloud");
+
+  // Allow extraction of bounding box size
+  octree.def("get_box_size",
+             &Octree::get_box_size,
+             "Return the side length of the bounding box");
+
+  // Allow extraction of min point
+  octree.def("get_min_point",
+             &Octree::get_min_point,
+             "Return the minimum point of the bounding box");
+
+  // Allow extraction of max point
+  octree.def(
+    "get_max_point", &Octree::get_max_point, "Return 8-bit dilated integer");
+
+  // Allow extraction of cell sizes
+  octree.def("get_cell_size",
+             &Octree::get_cell_size,
+             "Return the size of cells at a level of depth");
+
+  // Allow extraction of number of occupied cells per level
+  octree.def("get_occupied_cells_per_level",
+             &Octree::get_occupied_cells_per_level,
+             "Return the number of occupied cells per level of depth");
+
+  // Allow extraction of maximum amount of points
+  octree.def("get_max_cell_population_per_level",
+             &Octree::get_max_cell_population_per_level,
+             "Return the maximum number of points per cell per level of depth");
+
+  // Allow extraction of average amount of points
+  octree.def("get_average_cell_population_per_level",
+             &Octree::get_average_cell_population_per_level,
+             "Return the average number of points per cell per level of depth");
+
+  // Allow extraction of std of amount of points
+  octree.def("get_std_cell_population_per_level",
+             &Octree::get_std_cell_population_per_level,
+             "Return the standard deviation of number of points per cell per "
+             "level of depth");
+
+  // Allow extraction of spatial keys
+  octree.def("get_spatial_keys",
+             &Octree::get_spatial_keys,
+             "Return the computed spatial keys");
+
+  // Allow extraction of point indices
+  octree.def("get_point_indices",
+             &Octree::get_point_indices,
+             "Return the sorted point indices");
+
+  // Allow cell index computation
+  octree.def(
+    "get_cell_index_start",
+    &Octree::get_cell_index_start,
+    "Return first the index of a cell in the sorted array of point indices "
+    "and point spatial keys");
+
+  // Allow cell index computation
+  octree.def(
+    "get_cell_index_end",
+    &Octree::get_cell_index_end,
+    "Return the last index of a cell in the sorted array of point indices "
+    "and point spatial keys");
+
+  // Allow extraction from points in cell
+  octree.def(
+    "get_points_indices_from_cells",
+    [](const Octree& self,
+       const Octree::KeyContainer& keys,
+       unsigned int level) {
+      RadiusSearchResult result;
+      std::size_t num_points =
+        self.get_points_indices_from_cells(keys, level, result);
+
+      return as_pyarray(std::move(result));
+    },
+    "Retrieve point indices and spatial keys for a given cell",
+    py::arg("spatial_keys"),
+    py::arg("level"));
+
+  // Allow extraction from points in cell
+  octree.def(
+    "get_cells_intersected_by_sphere",
+    [](const Octree& self,
+       const Eigen::Vector3d& query_point,
+       double radius,
+       unsigned int level) {
+      Octree::KeyContainer cells_inside;
+      Octree::KeyContainer cells_intersecting;
+      self.get_cells_intersected_by_sphere(
+        query_point, radius, level, cells_inside, cells_intersecting);
+
+      return py::make_tuple(as_pyarray(std::move(cells_inside)),
+                            as_pyarray(std::move(cells_intersecting)));
+    },
+    "Retrieve the spatial keys of cells intersected by a sphere.",
+    py::arg("query_point"),
+    py::arg("radius"),
+    py::arg("level"));
+
+  // Allow computation of level of depth at which a radius search will be most
+  // efficient
+  octree.def("find_appropriate_level_for_radius_search",
+             &Octree::find_appropriate_level_for_radius_search,
+             "Return the level of depth at which a radius search will be most "
+             "efficient");
+
+  // Allow radius search with optional depth level specification
+  octree.def(
+    "radius_search",
+    [](const Octree& self,
+       Eigen::Ref<const Eigen::Vector3d> query_point,
+       double radius,
+       std::optional<unsigned int> level) {
+      unsigned int lvl =
+        level.value_or(self.find_appropriate_level_for_radius_search(radius));
+
+      RadiusSearchResult result;
+      self.radius_search(query_point, radius, lvl, result);
+
+      return as_pyarray(std::move(result));
+    },
+    "Search point in given radius!",
+    py::arg("query_point"),
+    py::arg("radius"),
+    py::arg("level") = std::nullopt);
+
+  // Pickling support for the Octree data structure
+  octree.def("__getstate__", [](const Octree&) {
+    // If a user pickles Octree itself, we end up redundantly storing
+    // the point cloud itself, because the Octree is only usable with the
+    // cloud (scipy does exactly the same). We solve the problem by asking
+    // users to pickle Epoch instead, which is the much cleaner solution.
+    throw std::runtime_error{
+      "Please pickle Epoch instead of Octree. Otherwise unpickled Octree does "
+      "not know the point cloud."
+    };
+  });
+
   // Segment point cloud into a supervoxels
   m.def("segment_pc_in_supervoxels",
         [](Epoch& epoch,
@@ -271,6 +538,10 @@ PYBIND11_MODULE(_py4dgeo, m)
           return std::make_tuple(std::move(result),
                                  as_pyarray(std::move(used_radii)));
         },
+        py::arg("epoch"),
+        py::arg("corepoints"),
+        py::arg("normal_radii"),
+        py::arg("orientation"),
         "Compute M3C2 multiscale directions");
 
   // Corresponence distances computation
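Editor's note: a hedged sketch of the new bindings seen from the Python side follows; the cloud and radius are illustrative, and `Epoch._radius_search()` lazily builds whichever backend is the current default, exactly as the lambda in the binding above does.

```python
# Hedged sketch; the point cloud, query point and radius are illustrative.
import numpy as np
import py4dgeo

epoch = py4dgeo.Epoch(np.random.rand(5000, 3) * 100.0)

# Pick the octree backend for radius searches (the name is case-insensitive).
py4dgeo.Epoch.set_default_radius_search_tree("octree")
print(py4dgeo.Epoch.get_default_radius_search_tree())  # SearchTree.OctreeSearch

# Low-level radius query around one point; returns an array of point indices.
indices = epoch._radius_search(np.array([50.0, 50.0, 50.0]), 5.0)
print(indices.shape)
```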
py4dgeo/segmentation.py
CHANGED
@@ -130,8 +130,8 @@ class SpatiotemporalAnalysis:
         # Ensure that we do have a timestamp on the epoch
         epoch = check_epoch_timestamp(epoch)
 
-        # Ensure that the
-        epoch.
+        # Ensure that the tearch tree is built - no-op if triggered by the user
+        epoch._validate_search_tree()
 
         # Write the reference epoch into the archive
         with tempfile.TemporaryDirectory() as tmp_dir:
@@ -173,9 +173,9 @@ class SpatiotemporalAnalysis:
                 "Corepoints cannot be changed - please start a new analysis"
             )
 
-        # Ensure that the corepoints are stored as an epoch and
+        # Ensure that the corepoints are stored as an epoch and its search trees are built
         self._corepoints = as_epoch(_corepoints)
-        self._corepoints.
+        self._corepoints._validate_search_tree()
 
         # Write the corepoints into the archive
         with tempfile.TemporaryDirectory() as tmp_dir:
@@ -678,6 +678,7 @@ class RegionGrowingAlgorithmBase:
         return self._analysis
 
     def run(self, analysis, force=False):
+        _py4dgeo.Epoch.set_default_radius_search_tree("octree")
         """Calculate the _segmentation
 
         :param analysis:
@@ -715,9 +716,9 @@ class RegionGrowingAlgorithmBase:
             []
         )  # TODO: test initializing this in the analysis class, see if it crashes instantly
 
-        # Get corepoints from M3C2 class and build a
+        # Get corepoints from M3C2 class and build a search tree on them
         corepoints = as_epoch(analysis.corepoints)
-        corepoints.
+        corepoints._validate_search_tree()
 
         # Calculate the list of seed points and sort them
         seeds = analysis.seeds
@@ -931,6 +932,12 @@ class RegionGrowingAlgorithm(RegionGrowingAlgorithmBase):
         # window_jump = 1
         # window_penalty = 1.0
 
+        # Before starting the process, we check if the user has set a reasonable window width parameter
+        if self.window_width >= self.analysis.distances_for_compute.shape[1]:
+            raise Py4DGeoError(
+                "Window width cannot be larger than the length of the time series - please adapt parameter"
+            )
+
         # The list of generated seeds
         seeds = []
 
@@ -1044,7 +1051,8 @@ class RegionGrowingAlgorithm(RegionGrowingAlgorithmBase):
         # The 4D-OBC algorithm sorts by similarity in the neighborhood
         # of the seed.
         def neighborhood_similarity(seed):
-
+            self.analysis.corepoints._validate_search_tree()
+            neighbors = self.analysis.corepoints._radius_search(
                 self.analysis.corepoints.cloud[seed.index, :], self.neighborhood_radius
             )
             # if no neighbors are found make sure the algorithm continues its search but with a large dissimilarity
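Editor's note: a hedged sketch of how the new `window_width` guard surfaces in the 4D-OBC workflow follows; the archive name is a placeholder and further algorithm parameters are omitted. `run()` now also switches the default radius-search backend to the octree before growing regions.

```python
# Hedged sketch following the existing py4dgeo 4D-OBC workflow; "analysis.zip"
# is a placeholder and only parameters referenced in this diff are set.
import py4dgeo

analysis = py4dgeo.SpatiotemporalAnalysis("analysis.zip")
algorithm = py4dgeo.RegionGrowingAlgorithm(
    neighborhood_radius=1.0,
    window_width=24,  # must stay below the number of time steps in the analysis
)
# Raises Py4DGeoError if window_width exceeds the time series length.
algorithm.run(analysis)
print(len(analysis.objects))
```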
py4dgeo/util.py
CHANGED
@@ -3,9 +3,11 @@ import logging
 import numpy as np
 import os
 import platform
+import psutil
 import pooch
 import requests
 import sys
+import warnings
 import xdg
 
 from importlib import metadata
@@ -217,6 +219,16 @@ def set_num_threads(num_threads: int):
     :type num_threads: int
     """
 
+    env_threads = os.environ.get("OMP_NUM_THREADS")
+    if env_threads:
+        try:
+            env_threads_int = int(env_threads)
+            if env_threads_int != num_threads:
+                warnings.warn(
+                    f"OMP_NUM_THREADS environment variable is set to {env_threads_int}, but set_num_threads({num_threads}) was called. The environment variable may override this setting."
+                )
+        except ValueError:
+            raise Py4DGeoError(f"Invalid value for OMP_NUM_THREADS: '{env_threads}'")
     try:
         _py4dgeo.omp_set_num_threads(num_threads)
     except AttributeError:
@@ -254,6 +266,19 @@ def is_iterable(obj):
     return isinstance(obj, collections.abc.Iterable) and not isinstance(obj, str)
 
 
+def initialize_openmp_defaults():
+    """Set OpenMP environment variables for optimal performance on Windows with llvm OpenMP"""
+
+    # Only apply when using Windows
+    if platform.system() != "Windows":
+        return
+
+    # Only set if the user has not already
+    if "OMP_NUM_THREADS" not in os.environ:
+        num_cores = psutil.cpu_count(logical=False)
+        os.environ["OMP_NUM_THREADS"] = str(num_cores)
+
+
 def copy_test_data_entrypoint():
     # Define the target directory
     target = os.getcwd()
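Editor's note: a hedged sketch of the new OpenMP thread handling follows; the thread counts are examples. `initialize_openmp_defaults()` already ran at import time and is a no-op on non-Windows platforms.

```python
# Hedged sketch; thread counts are illustrative. A mismatch between
# OMP_NUM_THREADS and set_num_threads() now triggers a warning as added above.
import os
import warnings

import py4dgeo

os.environ["OMP_NUM_THREADS"] = "4"
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    py4dgeo.set_num_threads(8)  # disagrees with OMP_NUM_THREADS -> warning
print([str(w.message) for w in caught])
```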
py4dgeo-0.8.0.data/platlib/libomp-a12116ba72d1d6820407cf30be23da04.dll
ADDED
Binary file

py4dgeo-0.8.0.data/platlib/msvcp140-a4c2229bdc2a2a630acdc095b4d86008.dll
ADDED
Binary file
py4dgeo-0.8.0.dist-info/DELVEWHEEL
ADDED
@@ -0,0 +1,2 @@
+Version: 1.11.1
+Arguments: ['C:\\Users\\runneradmin\\AppData\\Local\\Temp\\cibw-run-x9rhbpfq\\cp39-win_amd64\\build\\venv\\Scripts\\delvewheel', 'repair', '-v', '-w', 'C:\\Users\\runneradmin\\AppData\\Local\\Temp\\cibw-run-x9rhbpfq\\cp39-win_amd64\\repaired_wheel', 'C:\\Users\\runneradmin\\AppData\\Local\\Temp\\cibw-run-x9rhbpfq\\cp39-win_amd64\\built_wheel\\py4dgeo-0.8.0-cp39-cp39-win_amd64.whl']

{py4dgeo-0.7.0.dist-info → py4dgeo-0.8.0.dist-info}/METADATA
CHANGED
@@ -1,13 +1,14 @@
-Metadata-Version: 2.
+Metadata-Version: 2.2
 Name: py4dgeo
-Version: 0.
+Version: 0.8.0
 Summary: Library for change detection in 4D point cloud data
 Maintainer-Email: Dominic Kempf <ssc@iwr.uni-heidelberg.de>
 License: Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-
-
-
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: C++
 Classifier: Operating System :: OS Independent
@@ -25,6 +26,7 @@ Requires-Dist: seaborn
 Requires-Dist: scikit-learn
 Requires-Dist: vedo
 Requires-Dist: xdg
+Requires-Dist: psutil
 Description-Content-Type: text/markdown
 
 # Welcome to py4dgeo
@@ -84,7 +86,6 @@ Using py4dgeo requires the following software installed:
 In order to build the package from source, the following tools are also needed.
 
 * A C++17-compliant compiler
-* CMake `>= 3.9`
 * Doxygen (optional, documentation building is skipped if missing)
 
 ### Installing py4dgeo
@@ -123,6 +124,13 @@ its additional Python dependencies for testing and documentation building:
 python -m pip install -r requirements-dev.txt
 ```
 
+### 🪟 Windows-specific Notes
+py4dgeo works on Windows, but optimal performance is typically achieved on Linux.
+
+If you want to experiment with performance on your system, you can tune OpenMP using environment variables such as ```OMP_NUM_THREADS```, ```OMP_PROC_BIND```, and ```OMP_PLACES```.
+
+The best settings depend on your system, so there is no single recommended configuration.
+
 ### Setting up py4dgeo using Docker
 
 Additionally, `py4dgeo` provides a Docker image that allows to explore

py4dgeo-0.8.0.dist-info/RECORD
ADDED
@@ -0,0 +1,24 @@
+_py4dgeo.cp39-win_amd64.pyd,sha256=l6uYg_LZFIbmu6Ia7VHatOfO4SOq56G9jOfiZB-Y1Uw,857600
+py4dgeo/cloudcompare.py,sha256=hCe0YKZit8XVe98DsMKeUp-m-F2-avwicgS-IRjL4EQ,1111
+py4dgeo/epoch.py,sha256=y-DR19Lcyc9fpvw33i3WKy6U16DnIvrdcK05xpV3lFw,31587
+py4dgeo/fallback.py,sha256=5fL4OK8MIA7IMdgneRAVpmk8BWY_6ppYXIdtSvBMyDo,5240
+py4dgeo/logger.py,sha256=5J7DenNkvq5GMOF_Vd2kHLHGP7os1u4EFBMbyGCAJJY,2314
+py4dgeo/m3c2.py,sha256=TtrYiSWq7_U6uLBRQsEWplIgLRuRQoSmMffjn383zDQ,8451
+py4dgeo/m3c2ep.py,sha256=E0gjEk0a75YzO4KUE598L4ElnHIY9K9eiBkaKxxKpsg,29202
+py4dgeo/pbm3c2.py,sha256=Laa0v_9MUG1xEHlihsD-EV0BuPInR2L-9xQ0x5E2qDo,146237
+py4dgeo/py4dgeo_python.cpp,sha256=8TDJ28s8h0F8fjRPbiVykV60CaKJDaksBbm9MXK7xfc,28783
+py4dgeo/registration.py,sha256=aUdEas772buy31PVv-hA5Y3v-jGnnGeJB8Bo56eV4zg,17640
+py4dgeo/segmentation.py,sha256=MeBCc2Xh7tVHUYMKhAFqBKUJh4B4274OKvxohZMTkzA,54067
+py4dgeo/UpdateableZipFile.py,sha256=aZVdQgAc_M-EWFDIEVukgrYQUtEb5fRoRMCVxZqpggc,2770
+py4dgeo/util.py,sha256=_yowPxDyPcwuQGcBlRWRcpQ38_4Lw-BCt4oJIf6UHUU,9554
+py4dgeo/__init__.py,sha256=pEe0Zj9-YZtCMouP1IB95zvQSg9gA26erBg4H0jDUT4,2423
+py4dgeo-0.8.0.data/platlib/.load-order-py4dgeo-0.8.0,sha256=97iLj2m3vbU46AbFSKrbq4hxZ1pPNOZvy0aoCOmxtvs,90
+py4dgeo-0.8.0.data/platlib/libomp-a12116ba72d1d6820407cf30be23da04.dll,sha256=oSEWunLR1oIEB88wviPaBM551ruKcaXucXWcWh-qbxw,768000
+py4dgeo-0.8.0.data/platlib/msvcp140-a4c2229bdc2a2a630acdc095b4d86008.dll,sha256=pMIim9wqKmMKzcCVtNhgCOXD47x3cxdDVPPaT1vrnN4,575056
+py4dgeo-0.8.0.dist-info/DELVEWHEEL,sha256=RliAk1qCKddpFrnCE4-vuOzvBDfc4tkIpI2pU6VO3GA,401
+py4dgeo-0.8.0.dist-info/entry_points.txt,sha256=S8EHFVRD4deFJ_N6ZWst9v_ukH5lGnZY-f8NHjYoIfk,83
+py4dgeo-0.8.0.dist-info/METADATA,sha256=0r9xZELre0I1kpUVBdK7xBmkNYJbuzr9yt8cNVFCGrU,12697
+py4dgeo-0.8.0.dist-info/RECORD,,
+py4dgeo-0.8.0.dist-info/WHEEL,sha256=9tsL4JT94eZPTkcS3bNng2riasYJMxXndrO9CxUfJHs,104
+py4dgeo-0.8.0.dist-info/licenses/COPYING.md,sha256=ZetvO_BrdyO2DkROtlUcvpd6rl1M8Ak69vyDyePCZN0,1330
+py4dgeo-0.8.0.dist-info/licenses/LICENSE.md,sha256=Rza103klOvpFdEr8ed20dZErIT6Tm998uX2ai29wDl8,1028
py4dgeo-0.7.0.dist-info/RECORD
DELETED
@@ -1,20 +0,0 @@
-_py4dgeo.cp39-win_amd64.pyd,sha256=zaN5NSVv7ZV0_Qp1FqcH6Jp5ysJS93HfsC-Yf4Muyd0,526848
-py4dgeo/__init__.py,sha256=T_hK47cJOktmTxJrC03Ou-JGcdhs0IEKBcPXjUT4wJ8,767
-py4dgeo/cloudcompare.py,sha256=hCe0YKZit8XVe98DsMKeUp-m-F2-avwicgS-IRjL4EQ,1111
-py4dgeo/epoch.py,sha256=8Q9NTxb4OoiJx4IGESDXaDe4uwcgksofiMQLChsnndI,29914
-py4dgeo/fallback.py,sha256=dZltN-wUSMq5GYnUgCz157fh_H5gNf2MF2SdcntDQmI,5233
-py4dgeo/logger.py,sha256=5J7DenNkvq5GMOF_Vd2kHLHGP7os1u4EFBMbyGCAJJY,2314
-py4dgeo/m3c2.py,sha256=I-T2rnsHgZtjOcN0ONmB0UB3TlaW5je4-S0lgxhNvjo,8475
-py4dgeo/m3c2ep.py,sha256=vqudKR_QcV7z9bYMbGssVyJ2d4q5shgFhRue_WErJQ0,29150
-py4dgeo/pbm3c2.py,sha256=Ia7yWym0M72NNz1L-N5WhDA_ZGA_YmWmee80Dvdjgqo,146200
-py4dgeo/py4dgeo_python.cpp,sha256=J_csxFp1FF7nWHh8_z_BTJQRKbG4oPyRrrccJjoW4zw,18960
-py4dgeo/registration.py,sha256=aUdEas772buy31PVv-hA5Y3v-jGnnGeJB8Bo56eV4zg,17640
-py4dgeo/segmentation.py,sha256=pzSHuWyfHfiTDpmfy2onL3XTE_9dvLz7faVr0ddV0JI,53556
-py4dgeo/UpdateableZipFile.py,sha256=aZVdQgAc_M-EWFDIEVukgrYQUtEb5fRoRMCVxZqpggc,2770
-py4dgeo/util.py,sha256=dB27r6UAX1V4zy-bj-TbNxtjtwebvU7T1BCd3EIj59k,8553
-py4dgeo-0.7.0.dist-info/METADATA,sha256=jhvHo83amSpEvxj_2hXuIsBJWb1ef_ni04l70Xn9I9Q,12290
-py4dgeo-0.7.0.dist-info/WHEEL,sha256=s-IzttuPuPJp8zDv-jm-zGvTOCxdFCvjX9dIQ9zqnM8,104
-py4dgeo-0.7.0.dist-info/entry_points.txt,sha256=S8EHFVRD4deFJ_N6ZWst9v_ukH5lGnZY-f8NHjYoIfk,83
-py4dgeo-0.7.0.dist-info/licenses/COPYING.md,sha256=ZetvO_BrdyO2DkROtlUcvpd6rl1M8Ak69vyDyePCZN0,1330
-py4dgeo-0.7.0.dist-info/licenses/LICENSE.md,sha256=Rza103klOvpFdEr8ed20dZErIT6Tm998uX2ai29wDl8,1028
-py4dgeo-0.7.0.dist-info/RECORD,,
{py4dgeo-0.7.0.dist-info → py4dgeo-0.8.0.dist-info}/entry_points.txt
File without changes

{py4dgeo-0.7.0.dist-info → py4dgeo-0.8.0.dist-info}/licenses/COPYING.md
File without changes

{py4dgeo-0.7.0.dist-info → py4dgeo-0.8.0.dist-info}/licenses/LICENSE.md
File without changes