py4dgeo 1.0.0__cp314-cp314-macosx_14_0_arm64.whl

py4dgeo/epoch.py ADDED
@@ -0,0 +1,865 @@
+ from py4dgeo.logger import logger_context
+ from py4dgeo.registration import Transformation
+ from py4dgeo.util import (
+     Py4DGeoError,
+     append_file_extension,
+     as_double_precision,
+     find_file,
+     make_contiguous,
+     is_iterable,
+ )
+ from numpy.lib.recfunctions import append_fields
+
+ import dateparser
+ import datetime
+ import json
+ import laspy
+ import logging
+ import numpy as np
+ import os
+ import tempfile
+ import typing
+ import zipfile
+
+ import _py4dgeo
+
+ logger = logging.getLogger("py4dgeo")
+
+ # This integer controls the versioning of the epoch file format. Whenever the
+ # format is changed, this version should be increased, so that py4dgeo can warn
+ # about incompatibilities of py4dgeo with loaded data. This version is intentionally
+ # different from py4dgeo's version, because not all releases of py4dgeo necessarily
+ # change the epoch file format and we want to be as compatible as possible.
+ PY4DGEO_EPOCH_FILE_FORMAT_VERSION = 4
+
+
+ class NumpyArrayEncoder(json.JSONEncoder):
+     def default(self, obj):
+         if isinstance(obj, np.ndarray):
+             return obj.tolist()
+         return json.JSONEncoder.default(self, obj)
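+
+ # A minimal usage sketch (not part of the original module, values illustrative):
+ # NumpyArrayEncoder lets json.dumps handle numpy arrays by converting them to
+ # nested lists.
+ #
+ #   >>> json.dumps({"reduction_point": np.array([1.0, 2.0, 3.0])}, cls=NumpyArrayEncoder)
+ #   '{"reduction_point": [1.0, 2.0, 3.0]}'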
+
+
+ class Epoch(_py4dgeo.Epoch):
+     def __init__(
+         self,
+         cloud: np.ndarray,
+         normals: np.ndarray = None,
+         additional_dimensions: np.ndarray = None,
+         timestamp=None,
+         scanpos_info: dict = None,
+     ):
+         """Create a new epoch from a given point cloud
+
+         :param cloud:
+             The point cloud array of shape (n, 3).
+
+         :param normals:
+             The point cloud normals of shape (n, 3) where n is the
+             same as the number of points in the point cloud.
+
+         :param additional_dimensions:
+             A numpy array of additional, per-point data in the point cloud. The
+             numpy data type is expected to be a structured dtype, so that the data
+             columns are accessible by their name.
+
+         :param timestamp:
+             The point cloud timestamp, default is None.
+
+         :param scanpos_info:
+             Information about the scan positions, default is None.
+         """
+         # Check the given array shapes
+         if len(cloud.shape) != 2 or cloud.shape[1] != 3:
+             raise Py4DGeoError("Clouds need to be an array of shape (n, 3)")
+
+         # Make sure that cloud is double precision and contiguous in memory
+         cloud = as_double_precision(cloud)
+         cloud = make_contiguous(cloud)
+
+         # Start with an empty list of applied transformations
+         self._transformations = []
+
+         # Make sure that given normals are double precision and contiguous as well
+         if normals is not None:
+             normals = make_contiguous(as_double_precision(normals))
+         self._normals = normals
+
+         # Set metadata properties
+         self.timestamp = timestamp
+         self.scanpos_info = scanpos_info
+
+         # Set the additional information (e.g. segment ids, normals, etc.)
+         self.additional_dimensions = additional_dimensions
+
+         # Call base class constructor
+         super().__init__(cloud)
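+
+     # A minimal construction sketch (illustrative values, not from the original
+     # module): per-point extra data goes into a structured numpy array so that
+     # columns are accessible by name.
+     #
+     #   >>> cloud = np.random.rand(100, 3)
+     #   >>> extra = np.zeros((100, 1), dtype=np.dtype([("scanpos_id", "<i4")]))
+     #   >>> epoch = Epoch(cloud, additional_dimensions=extra, timestamp="March 9th 2023")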
+
+     @property
+     def cloud(self):
+         return self._cloud
+
+     @cloud.setter
+     def cloud(self, cloud):
+         raise Py4DGeoError(
+             "The Epoch point cloud cannot be changed after initialization. Please construct a new Epoch, e.g. by slicing an existing one."
+         )
+
+     @property
+     def kdtree(self):
+         return self._kdtree
+
+     @kdtree.setter
+     def kdtree(self, kdtree):
+         raise Py4DGeoError(
+             "The KDTree of an Epoch cannot be changed after initialization."
+         )
+
+     @property
+     def octree(self):
+         return self._octree
+
+     @octree.setter
+     def octree(self, octree):
+         raise Py4DGeoError(
+             "The Octree of an Epoch cannot be changed after initialization."
+         )
+
+     @property
+     def normals(self):
+         # Normals might not have been calculated or attached yet
+         if self._normals is None:
+             raise Py4DGeoError(
+                 "Normals for this Epoch have not been calculated! Please use Epoch.calculate_normals or load externally calculated normals."
+             )
+
+         return self._normals
+
+     def calculate_normals(
+         self, radius=1.0, orientation_vector: np.ndarray = np.array([0, 0, 1])
+     ):
+         """Calculate point cloud normals
+
+         :param radius:
+             The radius used to determine the neighborhood of a point.
+
+         :param orientation_vector:
+             A vector to determine the orientation of the normals. It should point "up".
+         """
+
+         self._validate_search_tree()
+
+         # Reuse the multiscale code with a single radius in order to
+         # avoid code duplication.
+         with logger_context("Calculating point cloud normals:"):
+             self._normals, _ = _py4dgeo.compute_multiscale_directions(
+                 self,
+                 self.cloud,
+                 [radius],
+                 orientation_vector,
+             )
+
+         return self.normals
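+
+     # A short usage sketch (radius value illustrative): normals are computed
+     # once and cached on the epoch; afterwards they are available via
+     # Epoch.normals.
+     #
+     #   >>> epoch = Epoch(np.random.rand(100, 3))
+     #   >>> normals = epoch.calculate_normals(radius=1.0)
+     #   >>> normals.shape
+     #   (100, 3)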
+
+     def _validate_search_tree(self):
+         """Check if the default search tree is built and build it if not"""
+
+         tree_type = self.get_default_radius_search_tree()
+
+         if tree_type == _py4dgeo.SearchTree.KDTreeSearch:
+             if self.kdtree.leaf_parameter() == 0:
+                 self.build_kdtree()
+         else:
+             if self.octree.get_number_of_points() == 0:
+                 self.build_octree()
+
+     def normals_attachment(self, normals_array):
+         """Attach normals to the epoch object
+
+         :param normals_array:
+             The point cloud normals of shape (n, 3) where n is the
+             same as the number of points in the point cloud.
+         """
+
+         if normals_array.shape == self.cloud.shape:
+             self._normals = normals_array
+         else:
+             raise ValueError("Normals cannot be added. Shape does not match.")
+
+     def copy(self):
+         """Copy the epoch object"""
+
+         # Note: access _normals directly, because the normals property raises
+         # if no normals have been calculated yet.
+         new_epoch = Epoch(
+             self.cloud.copy(),
+             normals=self._normals.copy() if self._normals is not None else None,
+             additional_dimensions=(
+                 self.additional_dimensions.copy()
+                 if self.additional_dimensions is not None
+                 else None
+             ),
+             timestamp=self.timestamp,
+             scanpos_info=(
+                 self.scanpos_info.copy() if self.scanpos_info is not None else None
+             ),
+         )
+
+         return new_epoch
+
+     def __getitem__(self, ind):
+         """Slice the epoch in order to e.g. downsample it.
+
+         Creates a copy of the epoch.
+         """
+
+         return Epoch(
+             self.cloud[ind],
+             normals=self._normals[ind] if self._normals is not None else None,
+             additional_dimensions=(
+                 self.additional_dimensions[ind]
+                 if self.additional_dimensions is not None
+                 else None
+             ),
+             **self.metadata,
+         )
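+
+     # A slicing sketch: __getitem__ accepts anything numpy fancy indexing
+     # accepts, so a strided slice gives a downsampled copy of the epoch.
+     #
+     #   >>> downsampled = epoch[::10]
+     #   >>> subset = epoch[np.array([0, 2, 5])]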
+
+     @property
+     def timestamp(self):
+         return self._timestamp
+
+     @timestamp.setter
+     def timestamp(self, timestamp):
+         self._timestamp = normalize_timestamp(timestamp)
+
+     @property
+     def scanpos_info(self):
+         return self._scanpos_info
+
+     @scanpos_info.setter
+     def scanpos_info(self, scanpos_info):
+         if isinstance(scanpos_info, list):
+             self._scanpos_info = scanpos_info
+         elif isinstance(scanpos_info, dict):
+             self._scanpos_info = scan_positions_info_from_dict(scanpos_info)
+         else:
+             self._scanpos_info = None
+
+     @property
+     def scanpos_id(self):
+         return (
+             self.additional_dimensions["scanpos_id"]
+             .reshape(self.cloud.shape[0])
+             .astype(np.int32)
+         )
+
+     @scanpos_id.setter
+     def scanpos_id(self, scanpos_id):
+         if self.additional_dimensions is None:
+             additional_columns = np.empty(
+                 shape=(self.cloud.shape[0], 1),
+                 dtype=np.dtype([("scanpos_id", "<i4")]),
+             )
+             additional_columns["scanpos_id"] = np.array(
+                 scanpos_id, dtype=np.int32
+             ).reshape(-1, 1)
+             self.additional_dimensions = additional_columns
+         else:
+             scanpos_id = np.array(scanpos_id, dtype=np.int32)
+             new_additional_dimensions = append_fields(
+                 self.additional_dimensions, "scanpos_id", scanpos_id, usemask=False
+             )
+
+             self.additional_dimensions = new_additional_dimensions
+
+     @property
+     def metadata(self):
+         """Provide the metadata of this epoch as a Python dictionary
+
+         The return value of this property only makes use of Python built-in
+         data structures such that it can e.g. be serialized using the JSON
+         module. Also, the returned values are understood by :meth:`Epoch.__init__`,
+         so that you can do :code:`Epoch(cloud, **other.metadata)`.
+         """
+
+         return {
+             "timestamp": None if self.timestamp is None else str(self.timestamp),
+             "scanpos_info": None if self.scanpos_info is None else self.scanpos_info,
+         }
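+
+     # A metadata roundtrip sketch: the dictionary only contains built-in types,
+     # so it survives JSON serialization and can seed a new epoch.
+     #
+     #   >>> meta = json.loads(json.dumps(epoch.metadata))
+     #   >>> twin = Epoch(epoch.cloud.copy(), **meta)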
+
+     def build_kdtree(self, leaf_size=10, force_rebuild=False):
+         """Build the search tree index
+
+         :param leaf_size:
+             An internal optimization parameter of the search tree data structure.
+             The algorithm uses a bruteforce search on subtrees of size below the
+             given threshold. Increasing this value speeds up search tree build time,
+             but slows down query times.
+         :type leaf_size: int
+         :param force_rebuild:
+             Rebuild the search tree even if it was already built before.
+         :type force_rebuild: bool
+         """
+         if self.kdtree.leaf_parameter() == 0 or force_rebuild:
+             logger.info(f"Building KDTree structure with leaf parameter {leaf_size}")
+             self.kdtree.build_tree(leaf_size)
+
+     def build_octree(self):
+         """Build the search octree index"""
+         if self.octree.get_number_of_points() == 0:
+             logger.info("Building Octree structure")
+             self.octree.build_tree()
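+
+     # A tuning sketch (leaf size illustrative): larger leaves build faster but
+     # query slower; force_rebuild discards a previously built index.
+     #
+     #   >>> epoch.build_kdtree(leaf_size=32, force_rebuild=True)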
+
+     def transform(
+         self,
+         transformation: typing.Optional[Transformation] = None,
+         affine_transformation: typing.Optional[np.ndarray] = None,
+         rotation: typing.Optional[np.ndarray] = None,
+         translation: typing.Optional[np.ndarray] = None,
+         reduction_point: typing.Optional[np.ndarray] = None,
+     ):
+         """Transform the epoch with an affine transformation
+
+         :param transformation:
+             A Transformation object that describes the transformation to apply.
+             If this argument is given, the other arguments are ignored.
+             This parameter is typically used if the transformation was calculated
+             by py4dgeo itself.
+         :type transformation: Transformation
+         :param affine_transformation:
+             A 4x4 or 3x4 matrix representing the affine transformation, given
+             as a numpy array. If this argument is given, the rotation and
+             translation arguments are ignored.
+         :type affine_transformation: np.ndarray
+         :param rotation:
+             A 3x3 matrix specifying the rotation to apply.
+         :type rotation: np.ndarray
+         :param translation:
+             A vector specifying the translation to apply.
+         :type translation: np.ndarray
+         :param reduction_point:
+             A translation vector to apply before applying rotation and scaling.
+             This is used to increase the numerical accuracy of the transformation.
+             If a transformation is given, this argument is ignored.
+         :type reduction_point: np.ndarray
+         """
+
+         # Extract the affine transformation and reduction point from the given transformation
+         if transformation is not None:
+             assert isinstance(transformation, Transformation)
+             affine_transformation = transformation.affine_transformation
+             reduction_point = transformation.reduction_point
+
+         # Build the transformation if it is not explicitly given
+         if affine_transformation is None:
+             trafo = np.identity(4, dtype=np.float64)
+             trafo[:3, :3] = rotation
+             trafo[:3, 3] = translation
+         else:
+             # If it was given, make a copy and potentially resize it
+             trafo = affine_transformation.copy()
+             if trafo.shape[0] == 3:
+                 trafo.resize((4, 4), refcheck=False)
+                 trafo[3, 3] = 1
+
+         if reduction_point is None:
+             reduction_point = np.array([0, 0, 0], dtype=np.float64)
+
+         # Ensure contiguous double precision memory
+         trafo = as_double_precision(make_contiguous(trafo))
+
+         # Invalidate the KDTree
+         self.kdtree.invalidate()
+
+         # Invalidate the Octree
+         self.octree.invalidate()
+
+         if self._normals is None:
+             self._normals = np.empty((1, 3))  # dummy array to avoid an error in the C++ code
+
+         # Apply the actual transformation as efficient C++
+         _py4dgeo.transform_pointcloud_inplace(
+             self.cloud, trafo, reduction_point, self._normals
+         )
+
+         # Store the transformation
+         self._transformations.append(
+             Transformation(affine_transformation=trafo, reduction_point=reduction_point)
+         )
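+
+     # A transformation sketch (values illustrative): rotate 90° about z and
+     # shift, reducing about the cloud's centroid for numerical accuracy.
+     #
+     #   >>> rot = np.array([[0.0, -1.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, 1.0]])
+     #   >>> epoch.transform(
+     #   ...     rotation=rot,
+     #   ...     translation=np.array([10.0, 0.0, 0.0]),
+     #   ...     reduction_point=np.mean(epoch.cloud, axis=0),
+     #   ... )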
+
+     @property
+     def transformation(self):
+         """Access the affine transformations that were applied to this epoch
+
+         In order to set this property, please use the transform method instead,
+         which will make sure to also apply the transformation.
+
+         :returns:
+             Returns a list of applied transformations, each given as a tuple
+             of a 4x4 matrix defining the affine transformation and the
+             reduction point used when applying it.
+         """
+         return self._transformations
+
+     def save(self, filename):
+         """Save this epoch to a file
+
+         :param filename:
+             The filename to save the epoch in.
+         :type filename: str
+         """
+
+         # Ensure that we have a file extension
+         filename = append_file_extension(filename, "zip")
+         logger.info(f"Saving epoch to file '{filename}'")
+
+         # Use a temporary directory when creating files
+         with tempfile.TemporaryDirectory() as tmp_dir:
+             # Create the final archive
+             with zipfile.ZipFile(
+                 filename, mode="w", compression=zipfile.ZIP_BZIP2
+             ) as zf:
+                 # Write the epoch file format version number
+                 zf.writestr("EPOCH_FILE_FORMAT", str(PY4DGEO_EPOCH_FILE_FORMAT_VERSION))
+
+                 # Write the metadata dictionary into a json file
+                 metadatafile = os.path.join(tmp_dir, "metadata.json")
+                 with open(metadatafile, "w") as f:
+                     json.dump(self.metadata, f)
+                 zf.write(metadatafile, arcname="metadata.json")
+
+                 # Write the transformation into a file
+                 trafofile = os.path.join(tmp_dir, "trafo.json")
+                 with open(trafofile, "w") as f:
+                     json.dump(
+                         [t.__dict__ for t in self._transformations],
+                         f,
+                         cls=NumpyArrayEncoder,
+                     )
+                 zf.write(trafofile, arcname="trafo.json")
+
+                 # Write the actual point cloud array using laspy - LAZ compression
+                 # is far better than any compression numpy + zipfile can do.
+                 cloudfile = os.path.join(tmp_dir, "cloud.laz")
+                 hdr = laspy.LasHeader(version="1.4", point_format=6)
+                 hdr.x_scale = 0.00025
+                 hdr.y_scale = 0.00025
+                 hdr.z_scale = 0.00025
+                 mean_extent = np.mean(self.cloud, axis=0)
+                 hdr.x_offset = int(mean_extent[0])
+                 hdr.y_offset = int(mean_extent[1])
+                 hdr.z_offset = int(mean_extent[2])
+                 lasfile = laspy.LasData(hdr)
+                 lasfile.x = self.cloud[:, 0]
+                 lasfile.y = self.cloud[:, 1]
+                 lasfile.z = self.cloud[:, 2]
+
+                 # Define extra dimensions for the normals below:
+                 if self._normals is not None:
+                     lasfile.add_extra_dim(
+                         laspy.ExtraBytesParams(
+                             name="NormalX", type="f8", description="X axis of normals"
+                         )
+                     )
+                     lasfile.add_extra_dim(
+                         laspy.ExtraBytesParams(
+                             name="NormalY", type="f8", description="Y axis of normals"
+                         )
+                     )
+                     lasfile.add_extra_dim(
+                         laspy.ExtraBytesParams(
+                             name="NormalZ", type="f8", description="Z axis of normals"
+                         )
+                     )
+                     lasfile.NormalX = self.normals[:, 0]
+                     lasfile.NormalY = self.normals[:, 1]
+                     lasfile.NormalZ = self.normals[:, 2]
+                 else:
+                     logger.info("Saving a file without normals.")
+
+                 lasfile.write(cloudfile)
+                 zf.write(cloudfile, arcname="cloud.laz")
+
+                 # The search tree indices serialize themselves to the given paths
+                 kdtreefile = os.path.join(tmp_dir, "kdtree")
+                 self.kdtree.save_index(kdtreefile)
+                 zf.write(kdtreefile, arcname="kdtree")
+
+                 octreefile = os.path.join(tmp_dir, "octree")
+                 self.octree.save_index(octreefile)
+                 zf.write(octreefile, arcname="octree")
+
+     @staticmethod
+     def load(filename):
+         """Construct an Epoch instance by loading it from a file
+
+         :param filename:
+             The filename to load the epoch from.
+         :type filename: str
+         """
+
+         # Ensure that we have a file extension
+         filename = append_file_extension(filename, "zip")
+         logger.info(f"Restoring epoch from file '{filename}'")
+
+         # Use temporary directory for extraction of files
+         with tempfile.TemporaryDirectory() as tmp_dir:
+             # Open the ZIP archive
+             with zipfile.ZipFile(filename, mode="r") as zf:
+                 # Read the epoch file version number and compare to current
+                 version = int(zf.read("EPOCH_FILE_FORMAT").decode())
+                 if version > PY4DGEO_EPOCH_FILE_FORMAT_VERSION:
+                     raise Py4DGeoError(
+                         "Epoch file format not known - please update py4dgeo!"
+                     )
+
+                 # Read the metadata JSON file
+                 metadatafile = zf.extract("metadata.json", path=tmp_dir)
+                 with open(metadatafile, "r") as f:
+                     metadata = json.load(f)
+
+                 # Restore the point cloud itself
+                 cloudfile = zf.extract("cloud.laz", path=tmp_dir)
+                 lasfile = laspy.read(cloudfile)
+                 cloud = np.vstack((lasfile.x, lasfile.y, lasfile.z)).transpose()
+                 try:
+                     normals = np.vstack(
+                         (lasfile.NormalX, lasfile.NormalY, lasfile.NormalZ)
+                     ).transpose()
+                 except AttributeError:
+                     normals = None
+
+                 # Construct the epoch object
+                 epoch = Epoch(cloud, normals=normals, **metadata)
+
+                 # Restore the KDTree object
+                 kdtreefile = zf.extract("kdtree", path=tmp_dir)
+                 epoch.kdtree.load_index(kdtreefile)
+
+                 # Restore the Octree object if present
+                 try:
+                     octreefile = zf.extract("octree", path=tmp_dir)
+                     epoch.octree.load_index(octreefile)
+                 except KeyError:
+                     logger.warning(
+                         "No octree found in the archive. Skipping octree loading."
+                     )
+
+                 # Read the transformation if it exists
+                 if version >= 3:
+                     trafofile = zf.extract("trafo.json", path=tmp_dir)
+                     with open(trafofile, "r") as f:
+                         trafo = json.load(f)
+                     epoch._transformations = [Transformation(**t) for t in trafo]
+
+         return epoch
+
+     def __getstate__(self):
+         return (
+             PY4DGEO_EPOCH_FILE_FORMAT_VERSION,
+             self.metadata,
+             _py4dgeo.Epoch.__getstate__(self),
+         )
+
+     def __setstate__(self, state):
+         version, metadata, base = state
+
+         if version != PY4DGEO_EPOCH_FILE_FORMAT_VERSION:
+             raise Py4DGeoError("Epoch file format is out of date!")
+
+         # Restore metadata
+         for key, value in metadata.items():
+             setattr(self, key, value)
+
+         # Set the base class object
+         _py4dgeo.Epoch.__setstate__(self, base)
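+
+     # A persistence sketch (path illustrative): saving produces a ZIP archive
+     # with metadata, transformations, a LAZ point cloud and the search tree
+     # indices; loading restores all of them.
+     #
+     #   >>> epoch.save("scan_2023.zip")
+     #   >>> restored = Epoch.load("scan_2023.zip")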
+
+
+ def save_epoch(epoch, filename):
+     """Save an epoch to a given filename
+
+     :param epoch:
+         The epoch that should be saved.
+     :type epoch: Epoch
+     :param filename:
+         The filename to save the epoch under.
+     :type filename: str
+     """
+     return epoch.save(filename)
+
+
+ def load_epoch(filename):
+     """Load an epoch from a given filename
+
+     :param filename:
+         The filename to load the epoch from.
+     :type filename: str
+     """
+     return Epoch.load(filename)
+
+
+ def as_epoch(cloud):
+     """Create an epoch from a cloud
+
+     Idempotent operation to create an epoch from a cloud.
+     """
+
+     # If this is already an epoch, this is a no-op
+     if isinstance(cloud, Epoch):
+         return cloud
+
+     # Initialize an epoch from the given cloud
+     logger.info("Initializing Epoch object from given point cloud")
+     return Epoch(cloud)
+
+
+ def _as_tuple(x):
+     if isinstance(x, tuple):
+         return x
+     return (x,)
+
+
+ def read_from_xyz(
+     *filenames,
+     xyz_columns=[0, 1, 2],
+     normal_columns=[],
+     additional_dimensions={},
+     additional_dimensions_dtypes={},
+     **parse_opts,
+ ):
+     """Create an epoch from an xyz file
+
+     :param filenames:
+         One or more filenames to read from. Each line in an input file is
+         expected to contain three space separated numbers.
+     :type filenames: str
+     :param xyz_columns:
+         The column indices of X, Y and Z coordinates. Defaults to [0, 1, 2].
+     :type xyz_columns: list
+     :param normal_columns:
+         The column indices of the normal vector components. Leave empty if
+         your data file does not contain normals, otherwise exactly three indices
+         for the x, y and z components need to be given.
+     :type normal_columns: list
+     :param additional_dimensions:
+         A dictionary mapping column indices to names of additional data dimensions.
+         They will be read from the file and are accessible under their names from the
+         created Epoch objects. Additional column indices start with 3.
+     :type additional_dimensions: dict
+     :param additional_dimensions_dtypes:
+         A dictionary mapping column names to numpy dtypes which should be used
+         in parsing the data.
+     :type additional_dimensions_dtypes: dict
+     :param parse_opts:
+         Additional options forwarded to numpy.genfromtxt. This can be used
+         e.g. to change the delimiter character or to skip header lines.
+     :type parse_opts: dict
+     """
+
+     # Resolve the given path
+     filename = find_file(filenames[0])
+
+     # Ensure that usecols is not passed by the user, we need to use it ourselves
+     if "usecols" in parse_opts:
+         raise Py4DGeoError(
+             "read_from_xyz cannot be customized by using usecols, please use xyz_columns, normal_columns or additional_dimensions instead!"
+         )
+
+     # Read the point cloud
+     logger.info(f"Reading point cloud from file '{filename}'")
+
+     try:
+         cloud = np.genfromtxt(
+             filename, dtype=np.float64, usecols=xyz_columns, **parse_opts
+         )
+     except ValueError:
+         raise Py4DGeoError("Malformed XYZ file")
+
+     # Potentially read normals
+     normals = None
+     if normal_columns:
+         if len(normal_columns) != 3:
+             raise Py4DGeoError("normal_columns needs to be a list of three integers!")
+
+         try:
+             normals = np.genfromtxt(
+                 filename,
+                 dtype=np.float64,
+                 usecols=normal_columns,
+                 **parse_opts,
+             )
+         except ValueError:
+             raise Py4DGeoError("Malformed XYZ file")
+
+     # Potentially read additional_dimensions passed by the user
+     if additional_dimensions:
+         additional_columns = np.genfromtxt(
+             filename,
+             dtype=np.dtype(
+                 [
+                     (name, additional_dimensions_dtypes.get(name, np.float64))
+                     for name in additional_dimensions.values()
+                 ]
+             ),
+             usecols=list(additional_dimensions.keys()),
+             **parse_opts,
+         )
+     else:
+         additional_columns = np.empty(shape=(cloud.shape[0], 1), dtype=[])
+
+     # Finalize the construction of the new epoch
+     new_epoch = Epoch(cloud, normals=normals, additional_dimensions=additional_columns)
+
+     if len(filenames) == 1:
+         # End recursion and return non-tuple to make the case that the user
+         # called this with only one filename more intuitive
+         return new_epoch
+     else:
+         # Go into recursion
+         return (new_epoch,) + _as_tuple(
+             read_from_xyz(
+                 *filenames[1:],
+                 xyz_columns=xyz_columns,
+                 normal_columns=normal_columns,
+                 additional_dimensions=additional_dimensions,
+                 additional_dimensions_dtypes=additional_dimensions_dtypes,
+                 **parse_opts,
+             )
+         )
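+
+ # A reading sketch (file names and column layout illustrative): XYZ in the
+ # first three columns, normals in columns 3-5 and a per-point scan position
+ # id in column 6.
+ #
+ #   >>> epoch1, epoch2 = read_from_xyz(
+ #   ...     "epoch1.xyz",
+ #   ...     "epoch2.xyz",
+ #   ...     normal_columns=[3, 4, 5],
+ #   ...     additional_dimensions={6: "scanpos_id"},
+ #   ...     additional_dimensions_dtypes={"scanpos_id": "<i4"},
+ #   ... )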
+
+
+ def read_from_las(*filenames, normal_columns=[], additional_dimensions={}):
+     """Create an epoch from a LAS/LAZ file
+
+     :param filenames:
+         One or more filenames to read from. They are expected to be in LAS/LAZ
+         format and will be processed using laspy.
+     :type filenames: str
+     :param normal_columns:
+         The column names of the normal vector components, e.g. "NormalX", "nx",
+         "normal_x" etc. Keep in mind that there must be exactly three columns.
+         Leave empty if your data file does not contain normals.
+     :type normal_columns: list
+     :param additional_dimensions:
+         A dictionary mapping names of additional data dimensions in the input
+         dataset to additional data dimensions in our epoch data structure.
+     :type additional_dimensions: dict
+     """
+
+     # Resolve the given path
+     filename = find_file(filenames[0])
+
+     # Read the lasfile using laspy
+     logger.info(f"Reading point cloud from file '{filename}'")
+     lasfile = laspy.read(filename)
+
+     cloud = np.vstack(
+         (
+             lasfile.x,
+             lasfile.y,
+             lasfile.z,
+         )
+     ).transpose()
+
+     normals = None
+     if normal_columns:
+         if len(normal_columns) != 3:
+             raise Py4DGeoError("normal_columns needs to be a list of three strings!")
+
+         normals = np.vstack(
+             [
+                 lasfile.points[normal_columns[0]],
+                 lasfile.points[normal_columns[1]],
+                 lasfile.points[normal_columns[2]],
+             ]
+         ).transpose()
+
+     # Build the additional_dimensions dtype structure
+     additional_columns = np.empty(
+         shape=(cloud.shape[0], 1),
+         dtype=np.dtype(
+             [
+                 (column_name, lasfile.points[column_id].dtype)
+                 for column_id, column_name in additional_dimensions.items()
+             ]
+         ),
+     )
+
+     # ... and fill it with the data from the lasfile
+     for column_id, column_name in additional_dimensions.items():
+         additional_columns[column_name] = lasfile.points[column_id].reshape(-1, 1)
+
+     # Construct the Epoch and go into recursion
+     new_epoch = Epoch(
+         cloud,
+         normals=normals,
+         timestamp=lasfile.header.creation_date,
+         additional_dimensions=additional_columns,
+     )
+
+     if len(filenames) == 1:
+         # End recursion and return non-tuple to make the case that the user
+         # called this with only one filename more intuitive
+         return new_epoch
+     else:
+         # Go into recursion
+         return (new_epoch,) + _as_tuple(
+             read_from_las(
+                 *filenames[1:],
+                 normal_columns=normal_columns,
+                 additional_dimensions=additional_dimensions,
+             )
+         )
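+
+ # A reading sketch (file and dimension names illustrative): normals stored as
+ # extra dimensions and the standard LAS intensity field mapped into the epoch.
+ #
+ #   >>> epoch = read_from_las(
+ #   ...     "scan.laz",
+ #   ...     normal_columns=["NormalX", "NormalY", "NormalZ"],
+ #   ...     additional_dimensions={"intensity": "intensity"},
+ #   ... )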
+
+
+ def normalize_timestamp(timestamp):
+     """Bring a given timestamp into a standardized Python format"""
+
+     # This might be normalized already or non-existing
+     if isinstance(timestamp, datetime.datetime) or timestamp is None:
+         return timestamp
+
+     # This might be a date without time information, e.g. from laspy
+     if isinstance(timestamp, datetime.date):
+         return datetime.datetime(timestamp.year, timestamp.month, timestamp.day)
+
+     # If this is a tuple of (year, day of year) as used in the LAS
+     # file header, we convert it.
+     if is_iterable(timestamp):
+         if len(timestamp) == 2:
+             return datetime.datetime(timestamp[0], 1, 1) + datetime.timedelta(
+                 timestamp[1] - 1
+             )
+
+     # If this is a string, we use the dateparser library that understands
+     # all sorts of human-readable timestamps
+     if isinstance(timestamp, str):
+         parsed = dateparser.parse(timestamp)
+
+         # dateparser returns None for anything it does not understand
+         if parsed is not None:
+             return parsed
+
+     raise Py4DGeoError(f"The timestamp '{timestamp}' was not understood by py4dgeo.")
+
+
+ def scan_positions_info_from_dict(info_dict: dict):
+     if info_dict is None:
+         return None
+     if not isinstance(info_dict, dict):
+         raise Py4DGeoError("The input scan position information should be a dictionary.")
+
+     # Compatible with both integer keys and string keys as index of the scan
+     # positions in a JSON file: a roundtrip through json standardizes to string keys
+     scanpos_dict_load = json.loads(json.dumps(info_dict))
+     sps_list = []
+     for i in range(1, 1 + len(scanpos_dict_load)):
+         sps_list.append(scanpos_dict_load[str(i)])
+
+     for sp in sps_list:
+         sp_check = (
+             len(sp["origin"]) == 3
+             and isinstance(sp["sigma_range"], float)
+             and isinstance(sp["sigma_scan"], float)
+             and isinstance(sp["sigma_yaw"], float)
+         )
+         if not sp_check:
+             raise Py4DGeoError("Scan positions load failed, please check the format.")
+     return sps_list
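+
+ # An input sketch (values illustrative): scan positions are indexed by "1",
+ # "2", ... (integer keys also work) and each entry carries an origin and
+ # three float uncertainty parameters.
+ #
+ #   >>> scan_positions_info_from_dict(
+ #   ...     {
+ #   ...         "1": {
+ #   ...             "origin": [0.0, 0.0, 2.0],
+ #   ...             "sigma_range": 0.01,
+ #   ...             "sigma_scan": 0.005,
+ #   ...             "sigma_yaw": 0.005,
+ #   ...         }
+ #   ...     }
+ #   ... )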