py4dgeo 0.7.0__cp313-cp313-macosx_14_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- _py4dgeo.cpython-313-darwin.so +0 -0
- py4dgeo/.dylibs/libomp.dylib +0 -0
- py4dgeo/UpdateableZipFile.py +81 -0
- py4dgeo/__init__.py +32 -0
- py4dgeo/cloudcompare.py +32 -0
- py4dgeo/epoch.py +814 -0
- py4dgeo/fallback.py +159 -0
- py4dgeo/logger.py +77 -0
- py4dgeo/m3c2.py +244 -0
- py4dgeo/m3c2ep.py +855 -0
- py4dgeo/pbm3c2.py +3870 -0
- py4dgeo/py4dgeo_python.cpp +487 -0
- py4dgeo/registration.py +474 -0
- py4dgeo/segmentation.py +1280 -0
- py4dgeo/util.py +263 -0
- py4dgeo-0.7.0.dist-info/METADATA +200 -0
- py4dgeo-0.7.0.dist-info/RECORD +21 -0
- py4dgeo-0.7.0.dist-info/WHEEL +5 -0
- py4dgeo-0.7.0.dist-info/entry_points.txt +3 -0
- py4dgeo-0.7.0.dist-info/licenses/COPYING.md +17 -0
- py4dgeo-0.7.0.dist-info/licenses/LICENSE.md +5 -0
py4dgeo/epoch.py
ADDED
@@ -0,0 +1,814 @@
from py4dgeo.logger import logger_context
from py4dgeo.registration import Transformation
from py4dgeo.util import (
    Py4DGeoError,
    append_file_extension,
    as_double_precision,
    find_file,
    make_contiguous,
    is_iterable,
)
from numpy.lib.recfunctions import append_fields

import dateparser
import datetime
import json
import laspy
import logging
import numpy as np
import os
import tempfile
import typing
import zipfile

import _py4dgeo

logger = logging.getLogger("py4dgeo")

# This integer controls the versioning of the epoch file format. Whenever the
# format is changed, this version should be increased, so that py4dgeo can warn
# about incompatibilities of py4dgeo with loaded data. This version is intentionally
# different from py4dgeo's version, because not all releases of py4dgeo necessarily
# change the epoch file format and we want to be as compatible as possible.
PY4DGEO_EPOCH_FILE_FORMAT_VERSION = 4


class NumpyArrayEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return json.JSONEncoder.default(self, obj)


class Epoch(_py4dgeo.Epoch):
    def __init__(
        self,
        cloud: np.ndarray,
        normals: np.ndarray = None,
        additional_dimensions: np.ndarray = None,
        timestamp=None,
        scanpos_info: dict = None,
    ):
        """

        :param cloud:
            The point cloud array of shape (n, 3).

        :param normals:
            The point cloud normals of shape (n, 3) where n is the
            same as the number of points in the point cloud.

        :param additional_dimensions:
            A numpy array of additional, per-point data in the point cloud. The
            numpy data type is expected to be a structured dtype, so that the data
            columns are accessible by their name.

        :param timestamp:
            The point cloud timestamp, default is None.

        :param scanpos_info:
            The scan position information, default is None.
        """
        # Check the given array shapes
        if len(cloud.shape) != 2 or cloud.shape[1] != 3:
            raise Py4DGeoError("Clouds need to be an array of shape nx3")

        # Make sure that cloud is double precision and contiguous in memory
        cloud = as_double_precision(cloud)
        cloud = make_contiguous(cloud)

        # Set identity transformation
        self._transformations = []

        # Make sure that given normals are double precision and contiguous as well
        if normals is not None:
            normals = make_contiguous(as_double_precision(normals))
        self._normals = normals

        # Set metadata properties
        self.timestamp = timestamp
        self.scanpos_info = scanpos_info

        # Set the additional information (e.g. segment ids, normals, etc.)
        self.additional_dimensions = additional_dimensions

        # Call base class constructor
        super().__init__(cloud)
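
    # Construction sketch (hypothetical data; assumes the top-level re-export
    # in py4dgeo/__init__.py):
    #
    #   >>> import numpy as np
    #   >>> import py4dgeo
    #   >>> cloud = np.random.rand(100, 3)
    #   >>> epoch = py4dgeo.Epoch(cloud, timestamp="March 9th 2023")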

    @property
    def cloud(self):
        return self._cloud

    @cloud.setter
    def cloud(self, cloud):
        raise Py4DGeoError(
            "The Epoch point cloud cannot be changed after initialization. Please construct a new Epoch, e.g. by slicing an existing one."
        )

    @property
    def kdtree(self):
        return self._kdtree

    @kdtree.setter
    def kdtree(self, kdtree):
        raise Py4DGeoError(
            "The KDTree of an Epoch cannot be changed after initialization."
        )

    @property
    def normals(self):
        # Normals are only available if they were calculated or attached before
        if self._normals is None:
            raise Py4DGeoError(
                "Normals for this Epoch have not been calculated! Please use Epoch.calculate_normals or load externally calculated normals."
            )

        return self._normals

    def calculate_normals(
        self, radius=1.0, orientation_vector: np.ndarray = np.array([0, 0, 1])
    ):
        """Calculate point cloud normals

        :param radius:
            The radius used to determine the neighborhood of a point.

        :param orientation_vector:
            A vector to determine the orientation of the normals. It should point "up".
        """

        # Ensure that the KDTree is built
        if self.kdtree.leaf_parameter() == 0:
            self.build_kdtree()

        # Reuse the multiscale code with a single radius in order to
        # avoid code duplication.
        with logger_context("Calculating point cloud normals:"):
            self._normals, _ = _py4dgeo.compute_multiscale_directions(
                self,
                self.cloud,
                [radius],
                orientation_vector,
            )

        return self.normals
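
    # Normal calculation sketch (hypothetical radius; continues the epoch from
    # the construction sketch above):
    #
    #   >>> normals = epoch.calculate_normals(radius=0.5)
    #   >>> normals.shape == epoch.cloud.shape
    #   True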

    def normals_attachment(self, normals_array):
        """Attach normals to the epoch object

        :param normals_array:
            The point cloud normals of shape (n, 3) where n is the
            same as the number of points in the point cloud.
        """

        if normals_array.shape == self.cloud.shape:
            self._normals = normals_array
        else:
            raise ValueError("Normals cannot be added. Shape does not match.")

    def copy(self):
        """Copy the epoch object"""

        new_epoch = Epoch(
            self.cloud.copy(),
            normals=self._normals.copy() if self._normals is not None else None,
            additional_dimensions=(
                self.additional_dimensions.copy()
                if self.additional_dimensions is not None
                else None
            ),
            timestamp=self.timestamp,
            scanpos_info=(
                self.scanpos_info.copy() if self.scanpos_info is not None else None
            ),
        )

        return new_epoch

    def __getitem__(self, ind):
        """Slice the epoch in order to e.g. downsample it.

        Creates a copy of the epoch.
        """

        return Epoch(
            self.cloud[ind],
            normals=self._normals[ind] if self._normals is not None else None,
            additional_dimensions=(
                self.additional_dimensions[ind]
                if self.additional_dimensions is not None
                else None
            ),
            **self.metadata,
        )

    @property
    def timestamp(self):
        return self._timestamp

    @timestamp.setter
    def timestamp(self, timestamp):
        self._timestamp = normalize_timestamp(timestamp)

    @property
    def scanpos_info(self):
        return self._scanpos_info

    @scanpos_info.setter
    def scanpos_info(self, scanpos_info):
        if isinstance(scanpos_info, list):
            self._scanpos_info = scanpos_info
        elif isinstance(scanpos_info, dict):
            self._scanpos_info = scan_positions_info_from_dict(scanpos_info)
        else:
            self._scanpos_info = None

    @property
    def scanpos_id(self):
        return (
            self.additional_dimensions["scanpos_id"]
            .reshape(self.cloud.shape[0])
            .astype(np.int32)
        )

    @scanpos_id.setter
    def scanpos_id(self, scanpos_id):
        if self.additional_dimensions is None:
            additional_columns = np.empty(
                shape=(self.cloud.shape[0], 1),
                dtype=np.dtype([("scanpos_id", "<i4")]),
            )
            additional_columns["scanpos_id"] = np.array(
                scanpos_id, dtype=np.int32
            ).reshape(-1, 1)
            self.additional_dimensions = additional_columns
        else:
            scanpos_id = np.array(scanpos_id, dtype=np.int32)
            new_additional_dimensions = append_fields(
                self.additional_dimensions, "scanpos_id", scanpos_id, usemask=False
            )

            self.additional_dimensions = new_additional_dimensions

    @property
    def metadata(self):
        """Provide the metadata of this epoch as a Python dictionary

        The return value of this property only makes use of Python built-in
        data structures such that it can e.g. be serialized using the JSON
        module. Also, the returned values are understood by :ref:`Epoch.__init__`
        such that you can do :code:`Epoch(cloud, **other.metadata)`.
        """

        return {
            "timestamp": None if self.timestamp is None else str(self.timestamp),
            "scanpos_info": None if self.scanpos_info is None else self.scanpos_info,
        }
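
    # Metadata roundtrip sketch (follows the docstring above):
    #
    #   >>> meta_json = json.dumps(epoch.metadata)  # JSON-serializable
    #   >>> epoch2 = Epoch(epoch.cloud, **epoch.metadata)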

    def build_kdtree(self, leaf_size=10, force_rebuild=False):
        """Build the search tree index

        :param leaf_size:
            An internal optimization parameter of the search tree data structure.
            The algorithm uses a bruteforce search on subtrees of size below the
            given threshold. Increasing this value speeds up search tree build time,
            but slows down query times.
        :type leaf_size: int
        :param force_rebuild:
            Rebuild the search tree even if it was already built before.
        :type force_rebuild: bool
        """
        if self.kdtree.leaf_parameter() == 0 or force_rebuild:
            logger.info(f"Building KDTree structure with leaf parameter {leaf_size}")
            self.kdtree.build_tree(leaf_size)
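
    # KDTree usage sketch (radius_search is assumed to be exposed by the
    # compiled _py4dgeo KDTree binding; verify against your build):
    #
    #   >>> epoch.build_kdtree(leaf_size=16)
    #   >>> indices = epoch.kdtree.radius_search(np.array([0.5, 0.5, 0.5]), 0.1)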

    def transform(
        self,
        transformation: typing.Optional[Transformation] = None,
        affine_transformation: typing.Optional[np.ndarray] = None,
        rotation: typing.Optional[np.ndarray] = None,
        translation: typing.Optional[np.ndarray] = None,
        reduction_point: typing.Optional[np.ndarray] = None,
    ):
        """Transform the epoch with an affine transformation

        :param transformation:
            A Transformation object that describes the transformation to apply.
            If this argument is given, the other arguments are ignored.
            This parameter is typically used if the transformation was calculated
            by py4dgeo itself.
        :type transformation: Transformation
        :param affine_transformation:
            A 4x4 or 3x4 matrix representing the affine transformation. Given
            as a numpy array. If this argument is given, the rotation and
            translation arguments are ignored.
        :type affine_transformation: np.ndarray
        :param rotation:
            A 3x3 matrix specifying the rotation to apply
        :type rotation: np.ndarray
        :param translation:
            A vector specifying the translation to apply
        :type translation: np.ndarray
        :param reduction_point:
            A translation vector to apply before applying rotation and scaling.
            This is used to increase the numerical accuracy of the transformation.
            If a transformation is given, this argument is ignored.
        :type reduction_point: np.ndarray
        """

        # Extract the affine transformation and reduction point from the given transformation
        if transformation is not None:
            assert isinstance(transformation, Transformation)
            affine_transformation = transformation.affine_transformation
            reduction_point = transformation.reduction_point

        # Build the transformation if it is not explicitly given
        if affine_transformation is None:
            trafo = np.identity(4, dtype=np.float64)
            trafo[:3, :3] = rotation
            trafo[:3, 3] = translation
        else:
            # If it was given, make a copy and potentially resize it
            trafo = affine_transformation.copy()
            if trafo.shape[0] == 3:
                trafo.resize((4, 4), refcheck=False)
                trafo[3, 3] = 1

        if reduction_point is None:
            reduction_point = np.array([0, 0, 0], dtype=np.float64)

        # Ensure contiguous double precision memory
        trafo = as_double_precision(make_contiguous(trafo))

        # Invalidate the KDTree
        self.kdtree.invalidate()

        if self._normals is None:
            self._normals = np.empty((1, 3))  # dummy array to avoid error in C++ code
        # Apply the actual transformation in efficient C++ code
        _py4dgeo.transform_pointcloud_inplace(
            self.cloud, trafo, reduction_point, self._normals
        )

        # Store the transformation
        self._transformations.append(
            Transformation(affine_transformation=trafo, reduction_point=reduction_point)
        )
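
    # Transformation sketch (hypothetical rigid motion: a 90 degree rotation
    # about the z axis plus a unit shift in x):
    #
    #   >>> rot = np.array([[0.0, -1.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, 1.0]])
    #   >>> epoch.transform(rotation=rot, translation=np.array([1.0, 0.0, 0.0]))
    #   >>> len(epoch.transformation)
    #   1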

    @property
    def transformation(self):
        """Access the affine transformations that were applied to this epoch

        In order to set this property please use the transform method instead,
        which will make sure to also apply the transformation.

        :returns:
            Returns a list of applied transformations, each given as a 4x4
            matrix defining the affine transformation and the reduction point
            used when applying it.
        """
        return self._transformations

    def save(self, filename):
        """Save this epoch to a file

        :param filename:
            The filename to save the epoch in.
        :type filename: str
        """

        # Ensure that we have a file extension
        filename = append_file_extension(filename, "zip")
        logger.info(f"Saving epoch to file '{filename}'")

        # Use a temporary directory when creating files
        with tempfile.TemporaryDirectory() as tmp_dir:
            # Create the final archive
            with zipfile.ZipFile(
                filename, mode="w", compression=zipfile.ZIP_BZIP2
            ) as zf:
                # Write the epoch file format version number
                zf.writestr("EPOCH_FILE_FORMAT", str(PY4DGEO_EPOCH_FILE_FORMAT_VERSION))

                # Write the metadata dictionary into a json file
                metadatafile = os.path.join(tmp_dir, "metadata.json")
                with open(metadatafile, "w") as f:
                    json.dump(self.metadata, f)
                zf.write(metadatafile, arcname="metadata.json")

                # Write the transformation into a file
                trafofile = os.path.join(tmp_dir, "trafo.json")
                with open(trafofile, "w") as f:
                    json.dump(
                        [t.__dict__ for t in self._transformations],
                        f,
                        cls=NumpyArrayEncoder,
                    )
                zf.write(trafofile, arcname="trafo.json")

                # Write the actual point cloud array using laspy - LAZ compression
                # is far better than any compression numpy + zipfile can do.
                cloudfile = os.path.join(tmp_dir, "cloud.laz")
                hdr = laspy.LasHeader(version="1.4", point_format=6)
                hdr.x_scale = 0.00025
                hdr.y_scale = 0.00025
                hdr.z_scale = 0.00025
                mean_extent = np.mean(self.cloud, axis=0)
                hdr.x_offset = int(mean_extent[0])
                hdr.y_offset = int(mean_extent[1])
                hdr.z_offset = int(mean_extent[2])
                lasfile = laspy.LasData(hdr)
                lasfile.x = self.cloud[:, 0]
                lasfile.y = self.cloud[:, 1]
                lasfile.z = self.cloud[:, 2]

                # Define extra dimensions for the normals below:
                if self._normals is not None:
                    lasfile.add_extra_dim(
                        laspy.ExtraBytesParams(
                            name="NormalX", type="f8", description="X axis of normals"
                        )
                    )
                    lasfile.add_extra_dim(
                        laspy.ExtraBytesParams(
                            name="NormalY", type="f8", description="Y axis of normals"
                        )
                    )
                    lasfile.add_extra_dim(
                        laspy.ExtraBytesParams(
                            name="NormalZ", type="f8", description="Z axis of normals"
                        )
                    )
                    lasfile.NormalX = self.normals[:, 0]
                    lasfile.NormalY = self.normals[:, 1]
                    lasfile.NormalZ = self.normals[:, 2]
                else:
                    logger.info("Saving a file without normals.")

                lasfile.write(cloudfile)
                zf.write(cloudfile, arcname="cloud.laz")

                # Write the KDTree index into a file
                kdtreefile = os.path.join(tmp_dir, "kdtree")
                self.kdtree.save_index(kdtreefile)
                zf.write(kdtreefile, arcname="kdtree")

    @staticmethod
    def load(filename):
        """Construct an Epoch instance by loading it from a file

        :param filename:
            The filename to load the epoch from.
        :type filename: str
        """

        # Ensure that we have a file extension
        filename = append_file_extension(filename, "zip")
        logger.info(f"Restoring epoch from file '{filename}'")

        # Use temporary directory for extraction of files
        with tempfile.TemporaryDirectory() as tmp_dir:
            # Open the ZIP archive
            with zipfile.ZipFile(filename, mode="r") as zf:
                # Read the epoch file version number and compare to current
                version = int(zf.read("EPOCH_FILE_FORMAT").decode())
                if version > PY4DGEO_EPOCH_FILE_FORMAT_VERSION:
                    raise Py4DGeoError(
                        "Epoch file format not known - please update py4dgeo!"
                    )

                # Read the metadata JSON file
                metadatafile = zf.extract("metadata.json", path=tmp_dir)
                with open(metadatafile, "r") as f:
                    metadata = json.load(f)

                # Restore the point cloud itself
                cloudfile = zf.extract("cloud.laz", path=tmp_dir)
                lasfile = laspy.read(cloudfile)
                cloud = np.vstack((lasfile.x, lasfile.y, lasfile.z)).transpose()
                try:
                    normals = np.vstack(
                        (lasfile.NormalX, lasfile.NormalY, lasfile.NormalZ)
                    ).transpose()
                except AttributeError:
                    normals = None
                # Construct the epoch object
                epoch = Epoch(cloud, normals=normals, **metadata)

                # Restore the KDTree object
                kdtreefile = zf.extract("kdtree", path=tmp_dir)
                epoch.kdtree.load_index(kdtreefile)

                # Read the transformation if it exists
                if version >= 3:
                    trafofile = zf.extract("trafo.json", path=tmp_dir)
                    with open(trafofile, "r") as f:
                        trafo = json.load(f)
                    epoch._transformations = [Transformation(**t) for t in trafo]

        return epoch
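
    # Save/load roundtrip sketch (hypothetical path; coordinates are quantized
    # by the LAS scale factors on save):
    #
    #   >>> epoch.save("my_epoch")            # writes my_epoch.zip
    #   >>> restored = Epoch.load("my_epoch")
    #   >>> restored.cloud.shape == epoch.cloud.shape
    #   True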

    def __getstate__(self):
        return (
            PY4DGEO_EPOCH_FILE_FORMAT_VERSION,
            self.metadata,
            _py4dgeo.Epoch.__getstate__(self),
        )

    def __setstate__(self, state):
        v, metadata, base = state

        if v != PY4DGEO_EPOCH_FILE_FORMAT_VERSION:
            raise Py4DGeoError("Epoch file format is out of date!")

        # Restore metadata
        for k, v in metadata.items():
            setattr(self, k, v)

        # Set the base class object
        _py4dgeo.Epoch.__setstate__(self, base)


def save_epoch(epoch, filename):
    """Save an epoch to a given filename

    :param epoch:
        The epoch that should be saved.
    :type epoch: Epoch
    :param filename:
        The filename where to save the epoch
    :type filename: str
    """
    return epoch.save(filename)


def load_epoch(filename):
    """Load an epoch from a given filename

    :param filename:
        The filename to load the epoch from.
    :type filename: str
    """
    return Epoch.load(filename)


def as_epoch(cloud):
    """Create an epoch from a cloud

    Idempotent operation to create an epoch from a cloud.
    """

    # If this is already an epoch, this is a no-op
    if isinstance(cloud, Epoch):
        return cloud

    # Initialize an epoch from the given cloud
    logger.info("Initializing Epoch object from given point cloud")
    return Epoch(cloud)


def _as_tuple(x):
    if isinstance(x, tuple):
        return x
    return (x,)


def read_from_xyz(
    *filenames,
    xyz_columns=[0, 1, 2],
    normal_columns=[],
    additional_dimensions={},
    **parse_opts,
):
    """Create an epoch from an xyz file

    :param filenames:
        The filename(s) to read from. Each line in an input file is expected
        to contain three space separated numbers.
    :type filenames: str
    :param xyz_columns:
        The column indices of X, Y and Z coordinates. Defaults to [0, 1, 2].
    :type xyz_columns: list
    :param normal_columns:
        The column indices of the normal vector components. Leave empty if
        your data file does not contain normals, otherwise exactly three indices
        for the x, y and z components need to be given.
    :type normal_columns: list
    :param parse_opts:
        Additional options forwarded to numpy.genfromtxt. This can be used
        to e.g. change the delimiter character, skip header lines or manually
        specify which columns of the input contain the XYZ coordinates.
    :type parse_opts: dict
    :param additional_dimensions:
        A dictionary, mapping column indices to names of additional data dimensions.
        They will be read from the file and are accessible under their names from the
        created Epoch objects. Additional column indices start at 3.
    :type additional_dimensions: dict
    """

    # Resolve the given path
    filename = find_file(filenames[0])

    # Ensure that usecols is not passed by the user, as we need to use it
    if "usecols" in parse_opts:
        raise Py4DGeoError(
            "read_from_xyz cannot be customized by using usecols, please use xyz_columns, normal_columns or additional_dimensions instead!"
        )

    # Read the point cloud
    logger.info(f"Reading point cloud from file '{filename}'")

    try:
        cloud = np.genfromtxt(
            filename, dtype=np.float64, usecols=xyz_columns, **parse_opts
        )
    except ValueError:
        raise Py4DGeoError("Malformed XYZ file")

    # Potentially read normals
    normals = None
    if normal_columns:
        if len(normal_columns) != 3:
            raise Py4DGeoError("normal_columns need to be a list of three integers!")

        try:
            normals = np.genfromtxt(
                filename, dtype=np.float64, usecols=normal_columns, **parse_opts
            )
        except ValueError:
            raise Py4DGeoError("Malformed XYZ file")

    # Potentially read additional_dimensions passed by the user
    additional_columns = np.empty(
        shape=(cloud.shape[0], 1),
        dtype=np.dtype([(name, "<f8") for name in additional_dimensions.values()]),
    )

    add_cols = list(sorted(additional_dimensions.keys()))
    try:
        parsed_additionals = np.genfromtxt(
            filename, dtype=np.float64, usecols=add_cols, **parse_opts
        )
        # Ensure that the parsed array is two-dimensional, even if only
        # one additional dimension was given (avoids an edge case)
        parsed_additionals = parsed_additionals.reshape(cloud.shape[0], -1)
    except ValueError:
        raise Py4DGeoError("Malformed XYZ file")

    for i, col in enumerate(add_cols):
        additional_columns[additional_dimensions[col]] = parsed_additionals[
            :, i
        ].reshape(-1, 1)

    # Finalize the construction of the new epoch
    new_epoch = Epoch(cloud, normals=normals, additional_dimensions=additional_columns)

    if len(filenames) == 1:
        # End recursion and return non-tuple to make the case that the user
        # called this with only one filename more intuitive
        return new_epoch
    else:
        # Go into recursion
        return (new_epoch,) + _as_tuple(
            read_from_xyz(
                *filenames[1:],
                xyz_columns=xyz_columns,
                normal_columns=normal_columns,
                additional_dimensions=additional_dimensions,
                **parse_opts,
            )
        )
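
# Reader sketch for read_from_xyz (hypothetical files with columns
# "x y z nx ny nz"):
#
#   >>> epoch = read_from_xyz("scan.xyz", normal_columns=[3, 4, 5])
#   >>> e1, e2 = read_from_xyz("scan1.xyz", "scan2.xyz")  # multiple files -> tuple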


def read_from_las(*filenames, normal_columns=[], additional_dimensions={}):
    """Create an epoch from a LAS/LAZ file

    :param filenames:
        The filename(s) to read from. The files are expected to be in LAS/LAZ
        format and will be processed using laspy.
    :type filenames: str
    :param normal_columns:
        The column names of the normal vector components, e.g. "NormalX", "nx",
        "normal_x" etc. Either exactly three column names must be given or the
        list must be left empty if your data file does not contain normals.
    :type normal_columns: list
    :param additional_dimensions:
        A dictionary, mapping names of additional data dimensions in the input
        dataset to additional data dimensions in our epoch data structure.
    :type additional_dimensions: dict
    """

    # Resolve the given path
    filename = find_file(filenames[0])

    # Read the lasfile using laspy
    logger.info(f"Reading point cloud from file '{filename}'")
    lasfile = laspy.read(filename)

    cloud = np.vstack(
        (
            lasfile.x,
            lasfile.y,
            lasfile.z,
        )
    ).transpose()

    normals = None
    if normal_columns:
        if len(normal_columns) != 3:
            raise Py4DGeoError("normal_columns need to be a list of three strings!")

        normals = np.vstack(
            [
                lasfile.points[normal_columns[0]],
                lasfile.points[normal_columns[1]],
                lasfile.points[normal_columns[2]],
            ]
        ).transpose()

    # Build the dtype structure for the additional dimensions (e.g. scan positions)
    additional_columns = np.empty(
        shape=(cloud.shape[0], 1),
        dtype=np.dtype([(name, "<f8") for name in additional_dimensions.values()]),
    )
    for column_id, column_name in additional_dimensions.items():
        additional_columns[column_name] = np.array(
            lasfile.points[column_id], dtype=np.int32
        ).reshape(-1, 1)

    # Construct Epoch and go into recursion
    new_epoch = Epoch(
        cloud,
        normals=normals,
        timestamp=lasfile.header.creation_date,
        additional_dimensions=additional_columns,
    )

    if len(filenames) == 1:
        # End recursion and return non-tuple to make the case that the user
        # called this with only one filename more intuitive
        return new_epoch
    else:
        # Go into recursion
        return (new_epoch,) + _as_tuple(
            read_from_las(
                *filenames[1:],
                normal_columns=normal_columns,
                additional_dimensions=additional_dimensions,
            )
        )
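
# LAS reader sketch (hypothetical filename and extra-bytes dimension names):
#
#   >>> epoch = read_from_las(
#   ...     "scan.laz", normal_columns=["NormalX", "NormalY", "NormalZ"]
#   ... )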


def normalize_timestamp(timestamp):
    """Bring a given timestamp into a standardized Python format"""

    # This might be normalized already or non-existing
    if isinstance(timestamp, datetime.datetime) or timestamp is None:
        return timestamp

    # This might be a date without time information e.g. from laspy
    if isinstance(timestamp, datetime.date):
        return datetime.datetime(timestamp.year, timestamp.month, timestamp.day)

    # If this is a tuple of (year, day of year) as used in the LAS
    # file header, we convert it.
    if is_iterable(timestamp):
        if len(timestamp) == 2:
            return datetime.datetime(timestamp[0], 1, 1) + datetime.timedelta(
                timestamp[1] - 1
            )

    # If this is a string we use the dateparser library that understands
    # all sorts of human-readable timestamps
    if isinstance(timestamp, str):
        parsed = dateparser.parse(timestamp)

        # dateparser returns None for anything it does not understand
        if parsed is not None:
            return parsed

    raise Py4DGeoError(f"The timestamp '{timestamp}' was not understood by py4dgeo.")
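
# Timestamp normalization sketch (the formats follow the branches above):
#
#   >>> normalize_timestamp("2017-08-15 11:30:00")
#   datetime.datetime(2017, 8, 15, 11, 30)
#   >>> normalize_timestamp((2017, 227))  # LAS-style (year, day of year)
#   datetime.datetime(2017, 8, 15, 0, 0)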


def scan_positions_info_from_dict(info_dict: dict):
    if info_dict is None:
        return None
    if not isinstance(info_dict, dict):
        raise Py4DGeoError("The input scan position information should be a dictionary.")
    # Compatible with both integer keys and string keys as the index of the scan
    # positions in a JSON file: a JSON dump/load roundtrip standardizes to string keys.
    scanpos_dict_load = json.loads(json.dumps(info_dict))
    sps_list = []
    for i in range(1, 1 + len(scanpos_dict_load)):
        sps_list.append(scanpos_dict_load[str(i)])

    for sp in sps_list:
        sp_check = (
            len(sp["origin"]) == 3
            and isinstance(sp["sigma_range"], float)
            and isinstance(sp["sigma_scan"], float)
            and isinstance(sp["sigma_yaw"], float)
        )
        if not sp_check:
            raise Py4DGeoError("Scan positions load failed, please check the format.")
    return sps_list