py4dgeo-1.0.0-cp314-cp314t-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
py4dgeo/fallback.py ADDED
@@ -0,0 +1,159 @@
+"""Fallback implementations for C++ components of the M3C2 algorithms"""
+
+from py4dgeo.m3c2 import M3C2
+
+import numpy as np
+import _py4dgeo
+
+
+def radius_workingset_finder(params: _py4dgeo.WorkingSetFinderParameters) -> np.ndarray:
+    indices = params.epoch._radius_search(params.corepoint, params.radius)
+    return params.epoch._cloud[indices, :]
+
+
+def cylinder_workingset_finder(
+    params: _py4dgeo.WorkingSetFinderParameters,
+) -> np.ndarray:
+    # Cut the cylinder into N segments, perform radius searches around the
+    # segment midpoints and create the union of indices
+    N = 1
+    max_cylinder_length = params.max_distance
+    if max_cylinder_length >= params.radius:
+        N = np.ceil(max_cylinder_length / params.radius)
+    else:
+        max_cylinder_length = params.radius
+
+    r_cyl = np.sqrt(
+        params.radius * params.radius
+        + max_cylinder_length * max_cylinder_length / (N * N)
+    )
+
+    slabs = []
+    for i in range(int(N)):
+        # Find indices around slab midpoint
+        qp = (
+            params.corepoint[0, :]
+            + (2 * i - N + 1) / N * max_cylinder_length * params.cylinder_axis[0, :]
+        )
+        indices = params.epoch._radius_search(qp, r_cyl)
+
+        # Gather the points from the point cloud
+        superset = params.epoch._cloud[indices, :]
+
+        # Calculate distance from the axis and the plane perpendicular to the axis
+        to_corepoint = superset - qp
+        to_corepoint_plane = to_corepoint.dot(params.cylinder_axis[0, :])
+        to_axis2 = np.sum(
+            np.square(
+                to_corepoint
+                - np.multiply(
+                    to_corepoint_plane[:, np.newaxis], params.cylinder_axis[0, :]
+                )
+            ),
+            axis=1,
+        )
+
+        # Filter the points that are not within the slab
+        filtered = superset[
+            np.logical_and(
+                to_axis2 <= params.radius * params.radius,
+                np.abs(to_corepoint_plane) < max_cylinder_length / N,
+            )
+        ]
+
+        slabs.append(filtered)
+
+    return np.concatenate(tuple(slabs))
+
+
+def mean_stddev_distance(
+    params: _py4dgeo.DistanceUncertaintyCalculationParameters,
+) -> tuple:
+    # Calculate distance
+    distance = params.normal[0, :].dot(
+        params.workingset2.mean(axis=0) - params.workingset1.mean(axis=0)
+    )
+
+    # Calculate variances
+    variance1 = params.normal @ np.cov(params.workingset1.T) @ params.normal.T
+    variance2 = params.normal @ np.cov(params.workingset2.T) @ params.normal.T
+
+    # The structured array that describes the full uncertainty
+    uncertainty = _py4dgeo.DistanceUncertainty(
+        lodetection=1.96
+        * (
+            np.sqrt(
+                variance1 / params.workingset1.shape[0]
+                + variance2 / params.workingset2.shape[0]
+            ).item()
+            + params.registration_error
+        ),
+        spread1=np.sqrt(variance1).item(),
+        num_samples1=params.workingset1.shape[0],
+        spread2=np.sqrt(variance2).item(),
+        num_samples2=params.workingset2.shape[0],
+    )
+
+    return distance, uncertainty
+
+
+def average_pos(a, pos, div):
+    # This is an unfortunate helper, but numpy.percentile does not do
+    # the correct thing. It sometimes averages although we have an exact
+    # match for the position we are searching.
+    if len(a) % div == 0:
+        return (
+            a[int(np.floor(pos * len(a)))] + a[int(np.floor(pos * len(a))) - 1]
+        ) / 2.0
+    else:
+        return a[int(np.floor(pos * len(a)))]
+
+
+def median_iqr_distance(
+    params: _py4dgeo.DistanceUncertaintyCalculationParameters,
+) -> tuple:
+    # Calculate distributions
+    dist1 = (params.workingset1 - params.corepoint[0, :]).dot(params.normal[0, :])
+    dist2 = (params.workingset2 - params.corepoint[0, :]).dot(params.normal[0, :])
+    dist1.sort()
+    dist2.sort()
+
+    median1 = average_pos(dist1, 0.5, 2)
+    median2 = average_pos(dist2, 0.5, 2)
+    iqr1 = average_pos(dist1, 0.75, 4) - average_pos(dist1, 0.25, 4)
+    iqr2 = average_pos(dist2, 0.75, 4) - average_pos(dist2, 0.25, 4)
+
+    # The structured array that describes the full uncertainty
+    uncertainty = _py4dgeo.DistanceUncertainty(
+        lodetection=1.96
+        * (
+            np.sqrt(
+                iqr1 * iqr1 / params.workingset1.shape[0]
+                + iqr2 * iqr2 / params.workingset2.shape[0]
+            )
+            + params.registration_error
+        ),
+        spread1=iqr1,
+        num_samples1=params.workingset1.shape[0],
+        spread2=iqr2,
+        num_samples2=params.workingset2.shape[0],
+    )
+
+    return median2 - median1, uncertainty
+
+
+class PythonFallbackM3C2(M3C2):
+    """An implementation of M3C2 that makes use of Python fallback implementations"""
+
+    @property
+    def name(self):
+        return "M3C2 (Python Fallback)"
+
+    def callback_workingset_finder(self):
+        return cylinder_workingset_finder
+
+    def callback_distance_calculation(self):
+        if self.robust_aggr:
+            return median_iqr_distance
+        else:
+            return mean_stddev_distance
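For orientation, here is a minimal usage sketch of the PythonFallbackM3C2 class added above. It is not part of the package diff: the file names are invented, py4dgeo.read_from_xyz and Epoch.cloud are assumed to work as in the released package, and the constructor arguments are taken from py4dgeo/m3c2.py shown below.

import py4dgeo
from py4dgeo.fallback import PythonFallbackM3C2

# Hypothetical input files; any two epochs of the same scene work here
epoch1 = py4dgeo.read_from_xyz("scan_2017.xyz")
epoch2 = py4dgeo.read_from_xyz("scan_2018.xyz")

# Use every 100th point of the first epoch as corepoints
m3c2 = PythonFallbackM3C2(
    epochs=(epoch1, epoch2),
    corepoints=epoch1.cloud[::100],
    cyl_radius=2.0,
    normal_radii=[0.5, 1.0, 2.0],
    max_distance=10.0,
)

# Same pipeline as M3C2, but with the Python callbacks defined above
distances, uncertainties = m3c2.run()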
py4dgeo/logger.py ADDED
@@ -0,0 +1,82 @@
+import contextlib
+import logging
+import sys
+import time
+
+
+def create_default_logger(filename=None):
+    # Create the logger instance
+    logger = logging.getLogger("py4dgeo")
+
+    # Close and remove existing handlers to avoid duplication and leaks
+    for handler in logger.handlers[:]:
+        try:
+            handler.close()
+        except Exception:
+            pass
+        logger.removeHandler(handler)
+
+    # Apply default for logfile name
+    if filename is None:
+        filename = "py4dgeo.log"
+
+    # We format messages including the date
+    _formatter = logging.Formatter(
+        "[%(asctime)s][%(levelname)s] %(message)s", "%Y-%m-%d %H:%M:%S"
+    )
+
+    # We use stdout for DEBUG and INFO messages
+    _stdouthandler = logging.StreamHandler(sys.stdout)
+    _stdouthandler.setLevel(logging.DEBUG)
+    _stdouthandler.addFilter(lambda r: r.levelno <= logging.INFO)
+    _stdouthandler.setFormatter(_formatter)
+    logger.addHandler(_stdouthandler)
+
+    # We use stderr for WARNING and ERROR messages
+    _stderrhandler = logging.StreamHandler(sys.stderr)
+    _stderrhandler.setLevel(logging.WARNING)
+    _stderrhandler.setFormatter(_formatter)
+    logger.addHandler(_stderrhandler)
+
+    # We additionally use a file that is automatically generated
+    _filehandler = logging.FileHandler(filename, mode="a", delay=True)
+    _filehandler.setLevel(logging.DEBUG)
+    _filehandler.setFormatter(_formatter)
+    logger.addHandler(_filehandler)
+
+    logger.setLevel(logging.INFO)
+
+    return logger
+
+
+# Storage to keep the logger instance alive + initial creation
+_logger = create_default_logger()
+
+
+def set_py4dgeo_logfile(filename):
+    """Set the logfile used by py4dgeo
+
+    All log messages produced by py4dgeo are logged into this file
+    in addition to being logged to stdout/stderr. By default, that file
+    is called 'py4dgeo.log'.
+
+    :param filename:
+        The name of the logfile to use
+    :type filename: str
+    """
+    global _logger
+    _logger = create_default_logger(filename)
+
+
+@contextlib.contextmanager
+def logger_context(msg, level=logging.INFO):
+    # Log a message that we started the task described by message
+    logger = logging.getLogger("py4dgeo")
+    logger.log(level, f"Starting: {msg}")
+
+    # Measure time
+    start = time.perf_counter()
+    yield
+    duration = time.perf_counter() - start
+
+    logger.log(level, f"Finished in {duration:.4f}s: {msg}")
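A short sketch of how the logging helpers above are intended to be used; it is illustration only, and the logfile name and messages are invented.

import logging
from py4dgeo.logger import set_py4dgeo_logfile, logger_context

# Redirect the py4dgeo logfile (stdout/stderr output is unaffected)
set_py4dgeo_logfile("my_analysis.log")

# Wrap a block of work to get "Starting: ..." / "Finished in ...s: ..." messages
with logger_context("Computing M3C2 distances"):
    pass  # expensive work would go here

# The package logger can also be used directly
logging.getLogger("py4dgeo").info("Analysis complete")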
py4dgeo/m3c2.py ADDED
@@ -0,0 +1,245 @@
+from py4dgeo.epoch import Epoch, as_epoch
+from py4dgeo.util import (
+    as_double_precision,
+    MemoryPolicy,
+    Py4DGeoError,
+    make_contiguous,
+    memory_policy_is_minimum,
+)
+
+import abc
+import logging
+import numpy as np
+import typing
+import laspy
+
+import _py4dgeo
+
+logger = logging.getLogger("py4dgeo")
+
+
+class M3C2LikeAlgorithm(abc.ABC):
+    def __init__(
+        self,
+        epochs: typing.Optional[typing.Tuple[Epoch, ...]] = None,
+        corepoints: typing.Optional[np.ndarray] = None,
+        cyl_radii: typing.Optional[typing.List[float]] = None,
+        cyl_radius: typing.Optional[float] = None,
+        max_distance: float = 0.0,
+        registration_error: float = 0.0,
+        robust_aggr: bool = False,
+    ):
+        self.epochs = epochs
+        self.corepoints = corepoints
+        self.cyl_radii = cyl_radii
+        self.cyl_radius = cyl_radius
+        self.max_distance = max_distance
+        self.registration_error = registration_error
+        self.robust_aggr = robust_aggr
+
+    @property
+    def corepoints(self):
+        return self._corepoints
+
+    @corepoints.setter
+    def corepoints(self, _corepoints):
+        if _corepoints is None:
+            self._corepoints = None
+        else:
+            if len(_corepoints.shape) != 2 or _corepoints.shape[1] != 3:
+                raise Py4DGeoError(
+                    "Corepoints need to be given as an array of shape nx3"
+                )
+            self._corepoints = as_double_precision(make_contiguous(_corepoints))
+
+    @property
+    def epochs(self):
+        return self._epochs
+
+    @epochs.setter
+    def epochs(self, _epochs):
+        if _epochs is not None and len(_epochs) != 2:
+            raise Py4DGeoError("Exactly two epochs need to be given!")
+        self._epochs = _epochs
+
+    @property
+    def name(self):
+        raise NotImplementedError
+
+    def directions(self):
+        """The normal direction(s) to use for this algorithm."""
+        raise NotImplementedError
+
+    def calculate_distances(
+        self, epoch1, epoch2, searchtree: typing.Optional[str] = None
+    ):
+        """Calculate the distances between two epochs"""
+
+        if isinstance(self.cyl_radii, typing.Iterable):
+            logger.warning(
+                "DEPRECATION: use cyl_radius instead of cyl_radii. In a future version, cyl_radii will be removed!"
+            )
+            if len(self.cyl_radii) != 1:
+                raise Py4DGeoError(
+                    "cyl_radii must be a list containing a single float!"
+                )
+            elif self.cyl_radius is None:
+                self.cyl_radius = self.cyl_radii[0]
+                self.cyl_radii = None
+
+        if self.cyl_radius is None:
+            raise Py4DGeoError(
+                f"{self.name} requires exactly one cylinder radius to be given as a float."
+            )
+
+        # Ensure appropriate trees are built
+        epoch1._validate_search_tree()
+        epoch2._validate_search_tree()
+
+        distances, uncertainties = _py4dgeo.compute_distances(
+            self.corepoints,
+            self.cyl_radius,
+            epoch1,
+            epoch2,
+            self.directions(),
+            self.max_distance,
+            self.registration_error,
+            self.callback_workingset_finder(),
+            self.callback_distance_calculation(),
+        )
+
+        return distances, uncertainties
+
+    def run(self):
+        """Main entry point for running the algorithm"""
+        return self.calculate_distances(self.epochs[0], self.epochs[1])
+
+    def callback_workingset_finder(self):
+        """The callback used to determine the point cloud subset around a corepoint"""
+        return _py4dgeo.cylinder_workingset_finder
+
+    def callback_distance_calculation(self):
+        """The callback used to calculate the distance between two point clouds"""
+        if self.robust_aggr:
+            return _py4dgeo.median_iqr_distance
+        else:
+            return _py4dgeo.mean_stddev_distance
+
+
+class M3C2(M3C2LikeAlgorithm):
+    def __init__(
+        self,
+        normal_radii: typing.List[float] = None,
+        orientation_vector: np.ndarray = np.array([0, 0, 1]),
+        corepoint_normals: np.ndarray = None,
+        cloud_for_normals: Epoch = None,
+        **kwargs,
+    ):
+        self.normal_radii = normal_radii
+        self.orientation_vector = as_double_precision(
+            make_contiguous(orientation_vector), policy_check=False
+        )
+        self.cloud_for_normals = cloud_for_normals
+        self.corepoint_normals = corepoint_normals
+        self._directions_radii = None
+        super().__init__(**kwargs)
+
+    def directions(self):
+        # If we already have normals, we return them. This happens e.g. if the user
+        # explicitly provided them or if we already computed them in a previous run.
+        if self.corepoint_normals is not None:
+            # Make sure that the normals use double precision
+            self.corepoint_normals = as_double_precision(self.corepoint_normals)
+
+            # Assert that the normal array has the correct shape
+            if (
+                len(self.corepoint_normals.shape) != 2
+                or self.corepoint_normals.shape[0] not in (1, self.corepoints.shape[0])
+                or self.corepoint_normals.shape[1] != 3
+            ):
+                raise Py4DGeoError(
+                    f"Incompatible size of corepoint normal array {self.corepoint_normals.shape}, expected {self.corepoints.shape} or (1, 3)!"
+                )
+
+            return self.corepoint_normals
+
+        # This does not work in STRICT mode
+        if not memory_policy_is_minimum(MemoryPolicy.MINIMAL):
+            raise Py4DGeoError(
+                "M3C2 requires at least the MINIMAL memory policy level to compute multiscale normals"
+            )
+
+        # Find the correct epoch to use for normal calculation
+        normals_epoch = self.cloud_for_normals
+        if normals_epoch is None:
+            normals_epoch = self.epochs[0]
+        normals_epoch = as_epoch(normals_epoch)
+
+        # Ensure appropriate tree structures have been built
+        normals_epoch._validate_search_tree()
+
+        # Trigger the precomputation
+        self.corepoint_normals, self._directions_radii = (
+            _py4dgeo.compute_multiscale_directions(
+                normals_epoch,
+                self.corepoints,
+                self.normal_radii,
+                self.orientation_vector,
+            )
+        )
+
+        return self.corepoint_normals
+
+    def directions_radii(self):
+        if self._directions_radii is None:
+            raise ValueError(
+                "Radii are only available after calculating directions with py4dgeo."
+            )
+
+        return self._directions_radii
+
+    @property
+    def name(self):
+        return "M3C2"
+
+
+def write_m3c2_results_to_las(
+    outfilepath: str, m3c2: M3C2LikeAlgorithm, attribute_dict: dict = {}
+):
+    """Save the corepoints, distances and other attributes to a given las filename
+
+    :param outfilepath:
+        The las file path to save the corepoints, distances and other attributes.
+    :type outfilepath: str
+    :param m3c2:
+        The M3C2LikeAlgorithm object.
+    :type m3c2: M3C2LikeAlgorithm
+    :param attribute_dict:
+        The dictionary of attributes which will be saved together with corepoints.
+    :type attribute_dict: dict
+    """
+    # Write the corepoints and the given attributes directly to a LAS file via
+    # laspy, rather than going through Epoch.save(), which writes a zip archive.
+    outpoints = m3c2.corepoints
+    hdr = laspy.LasHeader(version="1.4", point_format=6)
+    hdr.x_scale = 0.00025
+    hdr.y_scale = 0.00025
+    hdr.z_scale = 0.00025
+    mean_extent = np.mean(outpoints, axis=0)
+    hdr.x_offset = int(mean_extent[0])
+    hdr.y_offset = int(mean_extent[1])
+    hdr.z_offset = int(mean_extent[2])
+
+    las = laspy.LasData(hdr)
+
+    las.x = outpoints[:, 0]
+    las.y = outpoints[:, 1]
+    las.z = outpoints[:, 2]
+    for key, vals in attribute_dict.items():
+        try:
+            las[key] = vals
+        except Exception:
+            las.add_extra_dim(laspy.ExtraBytesParams(name=key, type=type(vals[0])))
+            las[key] = vals
+
+    las.write(outfilepath)
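To close the loop, a hedged sketch of how write_m3c2_results_to_las ties into a finished run. It assumes the uncertainties returned by run() form a structured array with a "lodetection" field, matching the fields set by the distance callbacks above; the output path and attribute names are examples only.

from py4dgeo.m3c2 import write_m3c2_results_to_las

# `m3c2` constructed as in the earlier sketch
distances, uncertainties = m3c2.run()

# Every attribute array must have one entry per corepoint
write_m3c2_results_to_las(
    "m3c2_result.las",
    m3c2,
    attribute_dict={
        "distance": distances,
        "lodetection": uncertainties["lodetection"],
    },
)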