py4dgeo-0.7.0-cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
py4dgeo/fallback.py ADDED
@@ -0,0 +1,159 @@
+ """Fallback implementations for C++ components of the M3C2 algorithms"""
+
+ from py4dgeo.m3c2 import M3C2
+
+ import numpy as np
+ import _py4dgeo
+
+
+ def radius_workingset_finder(params: _py4dgeo.WorkingSetFinderParameters) -> np.ndarray:
+     indices = params.epoch._kdtree.radius_search(params.corepoint, params.radius)
+     return params.epoch._cloud[indices, :]
+
+
+ def cylinder_workingset_finder(
+     params: _py4dgeo.WorkingSetFinderParameters,
+ ) -> np.ndarray:
+     # Cut the cylinder into N segments, perform radius searches around the
+     # segment midpoints and create the union of indices
+     N = 1
+     max_cylinder_length = params.max_distance
+     if max_cylinder_length >= params.radius:
+         N = np.ceil(max_cylinder_length / params.radius)
+     else:
+         max_cylinder_length = params.radius
+
+     r_cyl = np.sqrt(
+         params.radius * params.radius
+         + max_cylinder_length * max_cylinder_length / (N * N)
+     )
+
+     slabs = []
+     for i in range(int(N)):
+         # Find indices around slab midpoint
+         qp = (
+             params.corepoint[0, :]
+             + (2 * i - N + 1) / N * max_cylinder_length * params.cylinder_axis[0, :]
+         )
+         indices = params.epoch._kdtree.radius_search(qp, r_cyl)
+
+         # Gather the points from the point cloud
+         superset = params.epoch._cloud[indices, :]
+
+         # Calculate distance from the axis and the plane perpendicular to the axis
+         to_corepoint = superset - qp
+         to_corepoint_plane = to_corepoint.dot(params.cylinder_axis[0, :])
+         to_axis2 = np.sum(
+             np.square(
+                 to_corepoint
+                 - np.multiply(
+                     to_corepoint_plane[:, np.newaxis], params.cylinder_axis[0, :]
+                 )
+             ),
+             axis=1,
+         )
+
+         # Filter the points that are not within the slab
+         filtered = superset[
+             np.logical_and(
+                 to_axis2 <= params.radius * params.radius,
+                 np.abs(to_corepoint_plane) < max_cylinder_length / N,
+             )
+         ]
+
+         slabs.append(filtered)
+
+     return np.concatenate(tuple(slabs))
+
+
+ def mean_stddev_distance(
+     params: _py4dgeo.DistanceUncertaintyCalculationParameters,
+ ) -> tuple:
+     # Calculate distance
+     distance = params.normal[0, :].dot(
+         params.workingset2.mean(axis=0) - params.workingset1.mean(axis=0)
+     )
+
+     # Calculate variances
+     variance1 = params.normal @ np.cov(params.workingset1.T) @ params.normal.T
+     variance2 = params.normal @ np.cov(params.workingset2.T) @ params.normal.T
+
+     # The structured array that describes the full uncertainty
+     uncertainty = _py4dgeo.DistanceUncertainty(
+         lodetection=1.96
+         * (
+             np.sqrt(
+                 variance1 / params.workingset1.shape[0]
+                 + variance2 / params.workingset2.shape[0]
+             )
+             + params.registration_error
+         ),
+         spread1=np.sqrt(variance1),
+         num_samples1=params.workingset1.shape[0],
+         spread2=np.sqrt(variance2),
+         num_samples2=params.workingset2.shape[0],
+     )
+
+     return distance, uncertainty
+
+
+ def average_pos(a, pos, div):
+     # This is an unfortunate helper, but numpy.percentile does not do
+     # the correct thing. It sometimes averages although we have an exact
+     # match for the position we are searching.
+     if len(a) % div == 0:
+         return (
+             a[int(np.floor(pos * len(a)))] + a[int(np.floor(pos * len(a))) - 1]
+         ) / 2.0
+     else:
+         return a[int(np.floor(pos * len(a)))]
+
+
+ def median_iqr_distance(
+     params: _py4dgeo.DistanceUncertaintyCalculationParameters,
+ ) -> tuple:
+     # Calculate distributions
+     dist1 = (params.workingset1 - params.corepoint[0, :]).dot(params.normal[0, :])
+     dist2 = (params.workingset2 - params.corepoint[0, :]).dot(params.normal[0, :])
+     dist1.sort()
+     dist2.sort()
+
+     median1 = average_pos(dist1, 0.5, 2)
+     median2 = average_pos(dist2, 0.5, 2)
+     iqr1 = average_pos(dist1, 0.75, 4) - average_pos(dist1, 0.25, 4)
+     iqr2 = average_pos(dist2, 0.75, 4) - average_pos(dist2, 0.25, 4)
+
+     # The structured array that describes the full uncertainty
+     uncertainty = _py4dgeo.DistanceUncertainty(
+         lodetection=1.96
+         * (
+             np.sqrt(
+                 iqr1 * iqr1 / params.workingset1.shape[0]
+                 + iqr2 * iqr2 / params.workingset2.shape[0]
+             )
+             + params.registration_error
+         ),
+         spread1=iqr1,
+         num_samples1=params.workingset1.shape[0],
+         spread2=iqr2,
+         num_samples2=params.workingset2.shape[0],
+     )
+
+     return median2 - median1, uncertainty
+
+
+ class PythonFallbackM3C2(M3C2):
+     """An implementation of M3C2 that makes use of Python fallback implementations"""
+
+     @property
+     def name(self):
+         return "M3C2 (Python Fallback)"
+
+     def callback_workingset_finder(self):
+         return cylinder_workingset_finder
+
+     def callback_distance_calculation(self):
+         if self.robust_aggr:
+             return median_iqr_distance
+         else:
+             return mean_stddev_distance
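
The two distance callbacks above implement the standard M3C2 uncertainty model: mean_stddev_distance reports the level of detection as LoD = 1.96 * (sqrt(sigma1^2/n1 + sigma2^2/n2) + registration_error), while median_iqr_distance substitutes the median and interquartile range for the mean and standard deviation. The following sketch shows how the pure-Python fallback could be driven end to end. It is illustrative only: constructing an Epoch directly from an Nx3 float64 numpy array and the chosen radii are assumptions made for this example, not something fallback.py itself defines.

# Illustrative usage of PythonFallbackM3C2 (not part of the packaged sources).
# Assumption: Epoch can be constructed from an Nx3, C-contiguous float64 array.
import numpy as np

from py4dgeo.epoch import Epoch
from py4dgeo.fallback import PythonFallbackM3C2

# Two synthetic point clouds standing in for real scans of the same scene
cloud1 = np.random.rand(1000, 3)
cloud2 = cloud1 + np.array([0.0, 0.0, 0.01])

m3c2 = PythonFallbackM3C2(
    epochs=(Epoch(cloud1), Epoch(cloud2)),
    corepoints=np.ascontiguousarray(cloud1[::10, :]),
    cyl_radius=0.2,
    normal_radii=[0.2, 0.3],
    max_distance=1.0,
    robust_aggr=False,  # True switches to median_iqr_distance
)
distances, uncertainties = m3c2.run()

Because these callbacks run in Python, this path is slower than the default C++ callbacks; its main value is testing and prototyping custom working-set or uncertainty definitions.
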
py4dgeo/logger.py ADDED
@@ -0,0 +1,77 @@
+ import contextlib
+ import logging
+ import sys
+ import time
+
+
+ def create_default_logger(filename=None):
+     # Create the logger instance
+     logger = logging.getLogger("py4dgeo")
+
+     # Reset the handlers to avoid handler duplication
+     logger.handlers.clear()
+
+     # Apply default for logfile name
+     if filename is None:
+         filename = "py4dgeo.log"
+
+     # We format messages including the date
+     _formatter = logging.Formatter(
+         "[%(asctime)s][%(levelname)s] %(message)s", "%Y-%m-%d %H:%M:%S"
+     )
+
+     # We use stdout for DEBUG and INFO messages
+     _stdouthandler = logging.StreamHandler(sys.stdout)
+     _stdouthandler.setLevel(logging.DEBUG)
+     _stdouthandler.addFilter(lambda r: r.levelno <= logging.INFO)
+     _stdouthandler.setFormatter(_formatter)
+     logger.addHandler(_stdouthandler)
+
+     # We use stderr for WARNING and ERROR messages
+     _stderrhandler = logging.StreamHandler(sys.stderr)
+     _stderrhandler.setLevel(logging.WARNING)
+     _stderrhandler.setFormatter(_formatter)
+     logger.addHandler(_stderrhandler)
+
+     # We additionally use a file that is automatically generated
+     _filehandler = logging.FileHandler(filename, mode="a", delay=True)
+     _filehandler.setLevel(logging.DEBUG)
+     _filehandler.setFormatter(_formatter)
+     logger.addHandler(_filehandler)
+
+     logger.setLevel(logging.INFO)
+
+     return logger
+
+
+ # Storage to keep the logger instance alive + initial creation
+ _logger = create_default_logger()
+
+
+ def set_py4dgeo_logfile(filename):
+     """Set the logfile used by py4dgeo
+
+     All log messages produced by py4dgeo are logged into this file
+     in addition to being logged to stdout/stderr. By default, that file
+     is called 'py4dgeo.log'.
+
+     :param filename:
+         The name of the logfile to use
+     :type filename: str
+     """
+     global _logger
+     _logger = create_default_logger(filename)
+
+
+ @contextlib.contextmanager
+ def logger_context(msg, level=logging.INFO):
+     # Log a message that we started the task described by msg
+     logger = logging.getLogger("py4dgeo")
+     logger.log(level, f"Starting: {msg}")
+
+     # Measure time
+     start = time.perf_counter()
+     yield
+     duration = time.perf_counter() - start
+
+     logger.log(level, f"Finished in {duration:.4f}s: {msg}")
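
logger.py configures a package-wide "py4dgeo" logger with stdout, stderr and file handlers, and logger_context wraps a task with timed start/finish messages. A short illustrative example of how downstream code might use these helpers; the file name and messages are placeholders:

# Illustrative use of the logging helpers above (file name and messages are placeholders).
import logging

from py4dgeo.logger import logger_context, set_py4dgeo_logfile

# Redirect the logfile (the default is py4dgeo.log in the working directory)
set_py4dgeo_logfile("my_analysis.log")

logger = logging.getLogger("py4dgeo")
logger.info("Goes to stdout and my_analysis.log")
logger.warning("Goes to stderr and my_analysis.log")

# Emits "Starting: ..." and "Finished in ...s: ..." around the wrapped block
with logger_context("building KDTree"):
    pass  # place the actual work here
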
py4dgeo/m3c2.py ADDED
@@ -0,0 +1,243 @@
+ from py4dgeo.epoch import Epoch, as_epoch
+ from py4dgeo.util import (
+     as_double_precision,
+     MemoryPolicy,
+     Py4DGeoError,
+     make_contiguous,
+     memory_policy_is_minimum,
+ )
+
+ import abc
+ import logging
+ import numpy as np
+ import typing
+ import laspy
+
+ import _py4dgeo
+
+
+ logger = logging.getLogger("py4dgeo")
+
+
+ class M3C2LikeAlgorithm(abc.ABC):
+     def __init__(
+         self,
+         epochs: typing.Optional[typing.Tuple[Epoch, ...]] = None,
+         corepoints: typing.Optional[np.ndarray] = None,
+         cyl_radii: typing.Optional[typing.List[float]] = None,
+         cyl_radius: typing.Optional[float] = None,
+         max_distance: float = 0.0,
+         registration_error: float = 0.0,
+         robust_aggr: bool = False,
+     ):
+         self.epochs = epochs
+         self.corepoints = corepoints
+         self.cyl_radii = cyl_radii
+         self.cyl_radius = cyl_radius
+         self.max_distance = max_distance
+         self.registration_error = registration_error
+         self.robust_aggr = robust_aggr
+
+     @property
+     def corepoints(self):
+         return self._corepoints
+
+     @corepoints.setter
+     def corepoints(self, _corepoints):
+         if _corepoints is None:
+             self._corepoints = None
+         else:
+             if len(_corepoints.shape) != 2 or _corepoints.shape[1] != 3:
+                 raise Py4DGeoError(
+                     "Corepoints need to be given as an array of shape nx3"
+                 )
+             self._corepoints = as_double_precision(make_contiguous(_corepoints))
+
+     @property
+     def epochs(self):
+         return self._epochs
+
+     @epochs.setter
+     def epochs(self, _epochs):
+         if _epochs is not None and len(_epochs) != 2:
+             raise Py4DGeoError("Exactly two epochs need to be given!")
+         self._epochs = _epochs
+
+     @property
+     def name(self):
+         raise NotImplementedError
+
+     def directions(self):
+         """The normal direction(s) to use for this algorithm."""
+         raise NotImplementedError
+
+     def calculate_distances(self, epoch1, epoch2):
+         """Calculate the distances between two epochs"""
+
+         if isinstance(self.cyl_radii, typing.Iterable):
+             logger.warning(
+                 "DEPRECATION: use cyl_radius instead of cyl_radii. In a future version, cyl_radii will be removed!"
+             )
+             if len(self.cyl_radii) != 1:
+                 raise Py4DGeoError("cyl_radii must be a list containing a single float!")
+             elif self.cyl_radius is None:
+                 self.cyl_radius = self.cyl_radii[0]
+                 self.cyl_radii = None
+
+         if self.cyl_radius is None:
+             raise Py4DGeoError(
+                 f"{self.name} requires exactly one cylinder radius to be given as a float."
+             )
+
+         # Ensure that the KDTree data structures have been built. This is a no-op
+         # if it has already been triggered before - e.g. by a user with a custom
+         # leaf cutoff parameter.
+         epoch1.build_kdtree()
+         epoch2.build_kdtree()
+
+         distances, uncertainties = _py4dgeo.compute_distances(
+             self.corepoints,
+             self.cyl_radius,
+             epoch1,
+             epoch2,
+             self.directions(),
+             self.max_distance,
+             self.registration_error,
+             self.callback_workingset_finder(),
+             self.callback_distance_calculation(),
+         )
+
+         return distances, uncertainties
+
+     def run(self):
+         """Main entry point for running the algorithm"""
+         return self.calculate_distances(self.epochs[0], self.epochs[1])
+
+     def callback_workingset_finder(self):
+         """The callback used to determine the point cloud subset around a corepoint"""
+         return _py4dgeo.cylinder_workingset_finder
+
+     def callback_distance_calculation(self):
+         """The callback used to calculate the distance between two point clouds"""
+         if self.robust_aggr:
+             return _py4dgeo.median_iqr_distance
+         else:
+             return _py4dgeo.mean_stddev_distance
+
+
+ class M3C2(M3C2LikeAlgorithm):
+     def __init__(
+         self,
+         normal_radii: typing.List[float] = None,
+         orientation_vector: np.ndarray = np.array([0, 0, 1]),
+         corepoint_normals: np.ndarray = None,
+         cloud_for_normals: Epoch = None,
+         **kwargs,
+     ):
+         self.normal_radii = normal_radii
+         self.orientation_vector = as_double_precision(
+             make_contiguous(orientation_vector), policy_check=False
+         )
+         self.cloud_for_normals = cloud_for_normals
+         self.corepoint_normals = corepoint_normals
+         self._directions_radii = None
+         super().__init__(**kwargs)
+
+     def directions(self):
+         # If we already have normals, we return them. This happens e.g. if the user
+         # explicitly provided them or if we already computed them in a previous run.
+         if self.corepoint_normals is not None:
+             # Make sure that the normals use double precision
+             self.corepoint_normals = as_double_precision(self.corepoint_normals)
+
+             # Assert that the normal array has the correct shape
+             if (
+                 len(self.corepoint_normals.shape) != 2
+                 or self.corepoint_normals.shape[0] not in (1, self.corepoints.shape[0])
+                 or self.corepoint_normals.shape[1] != 3
+             ):
+                 raise Py4DGeoError(
+                     f"Incompatible size of corepoint normal array {self.corepoint_normals.shape}, expected {self.corepoints.shape} or (1, 3)!"
+                 )
+
+             return self.corepoint_normals
+
+         # This does not work in STRICT mode
+         if not memory_policy_is_minimum(MemoryPolicy.MINIMAL):
+             raise Py4DGeoError(
+                 "M3C2 requires at least the MINIMUM memory policy level to compute multiscale normals"
+             )
+
+         # Find the correct epoch to use for normal calculation
+         normals_epoch = self.cloud_for_normals
+         if normals_epoch is None:
+             normals_epoch = self.epochs[0]
+         normals_epoch = as_epoch(normals_epoch)
+         # Ensure that the KDTree data structures have been built.
+         normals_epoch.build_kdtree()
+
+         # Trigger the precomputation
+         self.corepoint_normals, self._directions_radii = (
+             _py4dgeo.compute_multiscale_directions(
+                 normals_epoch,
+                 self.corepoints,
+                 self.normal_radii,
+                 self.orientation_vector,
+             )
+         )
+
+         return self.corepoint_normals
+
+     def directions_radii(self):
+         if self._directions_radii is None:
+             raise ValueError(
+                 "Radii are only available after calculating directions with py4dgeo."
+             )
+
+         return self._directions_radii
+
+     @property
+     def name(self):
+         return "M3C2"
+
+
+ def write_m3c2_results_to_las(
+     outfilepath: str, m3c2: M3C2LikeAlgorithm, attribute_dict: dict = {}
+ ):
+     """Save the corepoints, distances and other attributes to a given las filename
+
+     :param outfilepath:
+         The las file path to save the corepoints, distances and other attributes.
+     :type outfilepath: str
+     :param m3c2:
+         The M3C2LikeAlgorithm object.
+     :type m3c2: M3C2LikeAlgorithm
+     :param attribute_dict:
+         The dictionary of attributes which will be saved together with corepoints.
+     :type attribute_dict: dict
+     """
+     # Write the corepoints and the given attributes directly to a LAS file via laspy,
+     # using scaled integer coordinates with the offset placed at the corepoint centroid.
+     outpoints = m3c2.corepoints
+     hdr = laspy.LasHeader(version="1.4", point_format=6)
+     hdr.x_scale = 0.00025
+     hdr.y_scale = 0.00025
+     hdr.z_scale = 0.00025
+     mean_extent = np.mean(outpoints, axis=0)
+     hdr.x_offset = int(mean_extent[0])
+     hdr.y_offset = int(mean_extent[1])
+     hdr.z_offset = int(mean_extent[2])
+
+     las = laspy.LasData(hdr)
+
+     las.x = outpoints[:, 0]
+     las.y = outpoints[:, 1]
+     las.z = outpoints[:, 2]
+     for key, vals in attribute_dict.items():
+         try:
+             las[key] = vals
+         except Exception:
+             las.add_extra_dim(laspy.ExtraBytesParams(name=key, type=type(vals[0])))
+             las[key] = vals
+
+     las.write(outfilepath)
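
Taken together, a typical run of the algorithm defined in m3c2.py constructs M3C2 from two epochs, corepoints and radii, calls run(), and optionally exports the per-corepoint results with write_m3c2_results_to_las. The sketch below is illustrative only: building an Epoch directly from a numpy array and reading the returned uncertainties by field name (lodetection, matching the DistanceUncertainty fields used in fallback.py) are assumptions about the surrounding py4dgeo API rather than guarantees made by this file.

# Illustrative end-to-end use of the M3C2 class above (not part of the packaged sources).
# Assumptions: Epoch accepts an Nx3 float64 array; the returned uncertainties form a
# structured array exposing the DistanceUncertainty fields (e.g. "lodetection").
import numpy as np

from py4dgeo.epoch import Epoch
from py4dgeo.m3c2 import M3C2, write_m3c2_results_to_las

cloud1 = np.random.rand(5000, 3)
cloud2 = cloud1 + np.array([0.0, 0.0, 0.02])

m3c2 = M3C2(
    epochs=(Epoch(cloud1), Epoch(cloud2)),
    corepoints=np.ascontiguousarray(cloud1[::50, :]),
    cyl_radius=0.2,
    normal_radii=[0.2, 0.3],
    max_distance=1.0,
    registration_error=0.01,
)
distances, uncertainties = m3c2.run()

# Store corepoints, distances and the level of detection in a LAS file
write_m3c2_results_to_las(
    "m3c2_result.las",
    m3c2,
    attribute_dict={
        "distance": distances,
        "lodetection": uncertainties["lodetection"],
    },
)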