py4dgeo 0.6.0__cp39-cp39-win_amd64.whl → 0.7.0__cp39-cp39-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Binary file
py4dgeo/__init__.py CHANGED
@@ -9,7 +9,11 @@ from py4dgeo.epoch import (
9
9
  )
10
10
  from py4dgeo.m3c2 import M3C2, write_m3c2_results_to_las
11
11
  from py4dgeo.m3c2ep import M3C2EP
12
- from py4dgeo.registration import iterative_closest_point
12
+ from py4dgeo.registration import (
13
+ iterative_closest_point,
14
+ point_to_plane_icp,
15
+ icp_with_stable_areas,
16
+ )
13
17
  from py4dgeo.segmentation import (
14
18
  RegionGrowingAlgorithm,
15
19
  SpatiotemporalAnalysis,
py4dgeo/cloudcompare.py CHANGED
@@ -5,7 +5,7 @@ _cloudcompare_param_mapping = {
5
5
  "normalscale": "normal_radii",
6
6
  "registrationerror": "reg_error",
7
7
  "searchdepth": "max_distance",
8
- "searchscale": "cyl_radii",
8
+ "searchscale": "cyl_radius",
9
9
  "usemedian": "robust_aggr",
10
10
  }
11
11
 
@@ -21,10 +21,8 @@ class CloudCompareM3C2(M3C2):
21
21
  # Apply changes that are not a mere renaming
22
22
 
23
23
  # Scale parameters are diameters in CloudCompare and radii in py4dgeo
24
- if "cyl_radii" in py4dgeo_params:
25
- py4dgeo_params["cyl_radii"] = tuple(
26
- 0.5 * r for r in py4dgeo_params["cyl_radii"]
27
- )
24
+ if "cyl_radius" in py4dgeo_params:
25
+ py4dgeo_params["cyl_radius"] = py4dgeo_params["cyl_radius"] * 0.5
28
26
  if "normal_radii" in py4dgeo_params:
29
27
  py4dgeo_params["normal_radii"] = tuple(
30
28
  0.5 * r for r in py4dgeo_params["normal_radii"]
py4dgeo/epoch.py CHANGED
@@ -95,6 +95,26 @@ class Epoch(_py4dgeo.Epoch):
95
95
  # Call base class constructor
96
96
  super().__init__(cloud)
97
97
 
98
+ @property
99
+ def cloud(self):
100
+ return self._cloud
101
+
102
+ @cloud.setter
103
+ def cloud(self, cloud):
104
+ raise Py4DGeoError(
105
+ "The Epoch point cloud cannot be changed after initialization. Please construct a new Epoch, e.g. by slicing an existing one."
106
+ )
107
+
108
+ @property
109
+ def kdtree(self):
110
+ return self._kdtree
111
+
112
+ @kdtree.setter
113
+ def kdtree(self, kdtree):
114
+ raise Py4DGeoError(
115
+ "The KDTree of an Epoch cannot be changed after initialization."
116
+ )
117
+
98
118
  @property
99
119
  def normals(self):
100
120
  # Maybe calculate normals
@@ -121,22 +141,67 @@ class Epoch(_py4dgeo.Epoch):
121
141
  if self.kdtree.leaf_parameter() == 0:
122
142
  self.build_kdtree()
123
143
 
124
- # Allocate memory for the normals
125
- self._normals = np.empty(self.cloud.shape, dtype=np.float64)
126
-
127
144
  # Reuse the multiscale code with a single radius in order to
128
145
  # avoid code duplication.
129
146
  with logger_context("Calculating point cloud normals:"):
130
- _py4dgeo.compute_multiscale_directions(
147
+ self._normals, _ = _py4dgeo.compute_multiscale_directions(
131
148
  self,
132
149
  self.cloud,
133
150
  [radius],
134
151
  orientation_vector,
135
- self._normals,
136
152
  )
137
153
 
138
154
  return self.normals
139
155
 
156
+ def normals_attachment(self, normals_array):
157
+ """Attach normals to the epoch object
158
+
159
+ :param normals:
160
+ The point cloud normals of shape (n, 3) where n is the
161
+ same as the number of points in the point cloud.
162
+ """
163
+
164
+ if normals_array.shape == self.cloud.shape:
165
+ self._normals = normals_array
166
+ else:
167
+ raise ValueError("Normals cannot be added. Shape does not match.")
168
+
169
+ def copy(self):
170
+ """Copy the epoch object"""
171
+
172
+ new_epoch = Epoch(
173
+ self.cloud.copy(),
174
+ normals=self.normals.copy() if self.normals is not None else None,
175
+ additional_dimensions=(
176
+ self.additional_dimensions.copy()
177
+ if self.additional_dimensions is not None
178
+ else None
179
+ ),
180
+ timestamp=self.timestamp,
181
+ scanpos_info=(
182
+ self.scanpos_info.copy() if self.scanpos_info is not None else None
183
+ ),
184
+ )
185
+
186
+ return new_epoch
187
+
188
+ def __getitem__(self, ind):
189
+ """Slice the epoch in order to e.g. downsample it.
190
+
191
+ Creates a copy of the epoch.
192
+ """
193
+
194
+ return Epoch(
195
+ self.cloud[ind],
196
+ normals=self.normals[ind] if self.normals is not None else None,
197
+ additional_dimensions=(
198
+ self.additional_dimensions[ind]
199
+ if self.additional_dimensions is not None
200
+ else None
201
+ ),
202
+ **self.metadata,
203
+ )
204
+
140
205
  @property
141
206
  def timestamp(self):
142
207
  return self._timestamp
@@ -278,8 +343,12 @@ class Epoch(_py4dgeo.Epoch):
278
343
  # Invalidate the KDTree
279
344
  self.kdtree.invalidate()
280
345
 
346
+ if self._normals is None:
347
+ self._normals = np.empty((1, 3)) # dummy array to avoid error in C++ code
281
348
  # Apply the actual transformation as efficient C++
282
- _py4dgeo.transform_pointcloud_inplace(self.cloud, trafo, reduction_point)
349
+ _py4dgeo.transform_pointcloud_inplace(
350
+ self.cloud, trafo, reduction_point, self._normals
351
+ )
283
352
 
284
353
  # Store the transformation
285
354
  self._transformations.append(
py4dgeo/fallback.py CHANGED
@@ -1,4 +1,4 @@
1
- """Fallback implementations for C++ components of the M3C2 algorithms """
1
+ """Fallback implementations for C++ components of the M3C2 algorithms"""
2
2
 
3
3
  from py4dgeo.m3c2 import M3C2
4
4
 
@@ -7,8 +7,8 @@ import _py4dgeo
7
7
 
8
8
 
9
9
  def radius_workingset_finder(params: _py4dgeo.WorkingSetFinderParameters) -> np.ndarray:
10
- indices = params.epoch.kdtree.radius_search(params.corepoint, params.radius)
11
- return params.epoch.cloud[indices, :]
10
+ indices = params.epoch._kdtree.radius_search(params.corepoint, params.radius)
11
+ return params.epoch._cloud[indices, :]
12
12
 
13
13
 
14
14
  def cylinder_workingset_finder(
@@ -35,10 +35,10 @@ def cylinder_workingset_finder(
35
35
  params.corepoint[0, :]
36
36
  + (2 * i - N + 1) / N * max_cylinder_length * params.cylinder_axis[0, :]
37
37
  )
38
- indices = params.epoch.kdtree.radius_search(qp, r_cyl)
38
+ indices = params.epoch._kdtree.radius_search(qp, r_cyl)
39
39
 
40
40
  # Gather the points from the point cloud
41
- superset = params.epoch.cloud[indices, :]
41
+ superset = params.epoch._cloud[indices, :]
42
42
 
43
43
  # Calculate distance from the axis and the plane perpendicular to the axis
44
44
  to_corepoint = superset - qp
py4dgeo/m3c2.py CHANGED
@@ -1,3 +1,4 @@
1
+ from ast import List
1
2
  from py4dgeo.epoch import Epoch, as_epoch
2
3
  from py4dgeo.util import (
3
4
  as_double_precision,
@@ -22,9 +23,10 @@ logger = logging.getLogger("py4dgeo")
22
23
  class M3C2LikeAlgorithm(abc.ABC):
23
24
  def __init__(
24
25
  self,
25
- epochs: typing.Tuple[Epoch, ...] = None,
26
- corepoints: np.ndarray = None,
27
- cyl_radii: typing.List[float] = None,
26
+ epochs: typing.Optional[typing.Tuple[Epoch, ...]] = None,
27
+ corepoints: typing.Optional[np.ndarray] = None,
28
+ cyl_radii: typing.Optional[List] = None,
29
+ cyl_radius: typing.Optional[float] = None,
28
30
  max_distance: float = 0.0,
29
31
  registration_error: float = 0.0,
30
32
  robust_aggr: bool = False,
@@ -32,6 +34,7 @@ class M3C2LikeAlgorithm(abc.ABC):
32
34
  self.epochs = epochs
33
35
  self.corepoints = corepoints
34
36
  self.cyl_radii = cyl_radii
37
+ self.cyl_radius = cyl_radius
35
38
  self.max_distance = max_distance
36
39
  self.registration_error = registration_error
37
40
  self.robust_aggr = robust_aggr
@@ -72,9 +75,19 @@ class M3C2LikeAlgorithm(abc.ABC):
72
75
  def calculate_distances(self, epoch1, epoch2):
73
76
  """Calculate the distances between two epochs"""
74
77
 
75
- if self.cyl_radii is None or len(self.cyl_radii) != 1:
78
+ if isinstance(self.cyl_radii, typing.Iterable):
79
+ logger.warning(
80
+ "DEPRECATION: use cyl_radius instead of cyl_radii. In a future version, cyl_radii will be removed!"
81
+ )
82
+ if len(self.cyl_radii) != 1:
83
+ Py4DGeoError("cyl_radii must be a list containing a single float!")
84
+ elif self.cyl_radius is None:
85
+ self.cyl_radius = self.cyl_radii[0]
86
+ self.cyl_radii = None
87
+
88
+ if self.cyl_radius is None:
76
89
  raise Py4DGeoError(
77
- f"{self.name} requires exactly one cylinder radius to be given"
90
+ f"{self.name} requires exactly one cylinder radius to be given as a float."
78
91
  )
79
92
 
80
93
  # Ensure that the KDTree data structures have been built. This is no-op
@@ -85,7 +98,7 @@ class M3C2LikeAlgorithm(abc.ABC):
85
98
 
86
99
  distances, uncertainties = _py4dgeo.compute_distances(
87
100
  self.corepoints,
88
- self.cyl_radii[0],
101
+ self.cyl_radius,
89
102
  epoch1,
90
103
  epoch2,
91
104
  self.directions(),
@@ -128,6 +141,7 @@ class M3C2(M3C2LikeAlgorithm):
128
141
  )
129
142
  self.cloud_for_normals = cloud_for_normals
130
143
  self.corepoint_normals = corepoint_normals
144
+ self._directions_radii = None
131
145
  super().__init__(**kwargs)
132
146
 
133
147
  def directions(self):
@@ -155,9 +169,6 @@ class M3C2(M3C2LikeAlgorithm):
155
169
  "M3C2 requires at least the MINIMUM memory policy level to compute multiscale normals"
156
170
  )
157
171
 
158
- # Allocate the storage for the computed normals
159
- self.corepoint_normals = np.empty(self.corepoints.shape, dtype=np.float64)
160
-
161
172
  # Find the correct epoch to use for normal calculation
162
173
  normals_epoch = self.cloud_for_normals
163
174
  if normals_epoch is None:
@@ -167,16 +178,25 @@ class M3C2(M3C2LikeAlgorithm):
167
178
  normals_epoch.build_kdtree()
168
179
 
169
180
  # Trigger the precomputation
170
- _py4dgeo.compute_multiscale_directions(
171
- normals_epoch,
172
- self.corepoints,
173
- self.normal_radii,
174
- self.orientation_vector,
175
- self.corepoint_normals,
181
+ self.corepoint_normals, self._directions_radii = (
182
+ _py4dgeo.compute_multiscale_directions(
183
+ normals_epoch,
184
+ self.corepoints,
185
+ self.normal_radii,
186
+ self.orientation_vector,
187
+ )
176
188
  )
177
189
 
178
190
  return self.corepoint_normals
179
191
 
192
+ def directions_radii(self):
193
+ if self._directions_radii is None:
194
+ raise ValueError(
195
+ "Radii are only available after calculating directions with py4dgeo."
196
+ )
197
+
198
+ return self._directions_radii
199
+
180
200
  @property
181
201
  def name(self):
182
202
  return "M3C2"
py4dgeo/m3c2ep.py CHANGED
@@ -56,7 +56,7 @@ class M3C2EP(M3C2):
56
56
  print(self.name + " running")
57
57
  """Calculate the distances between two epochs"""
58
58
 
59
- if self.cyl_radii is None or len(self.cyl_radii) != 1:
59
+ if not isinstance(self.cyl_radius, float):
60
60
  raise Py4DGeoError(
61
61
  f"{self.name} requires exactly one cylinder radius to be given"
62
62
  )
@@ -71,7 +71,7 @@ class M3C2EP(M3C2):
71
71
 
72
72
  # set default M3C2Meta
73
73
  M3C2Meta = {"searchrad": 0.5, "maxdist": 3, "minneigh": 5, "maxneigh": 100000}
74
- M3C2Meta["searchrad"] = self.cyl_radii[0]
74
+ M3C2Meta["searchrad"] = self.cyl_radius
75
75
  M3C2Meta["maxdist"] = self.max_distance
76
76
 
77
77
  M3C2Meta["spInfos"] = [epoch1.scanpos_info, epoch2.scanpos_info]
@@ -194,9 +194,11 @@ class M3C2EP(M3C2):
194
194
  p2_coords_shm.unlink()
195
195
 
196
196
  out_attrs = {
197
- key: np.empty((query_coords.shape[0], 3, 3), dtype=val.dtype)
198
- if key == "m3c2_cov1" or key == "m3c2_cov2"
199
- else np.empty(query_coords.shape[0], dtype=val.dtype)
197
+ key: (
198
+ np.empty((query_coords.shape[0], 3, 3), dtype=val.dtype)
199
+ if key == "m3c2_cov1" or key == "m3c2_cov2"
200
+ else np.empty(query_coords.shape[0], dtype=val.dtype)
201
+ )
200
202
  for key, val in return_dict[0].items()
201
203
  }
202
204
  for key in out_attrs:
py4dgeo/pbm3c2.py CHANGED
@@ -1286,9 +1286,9 @@ class Segmentation(BaseTransformer):
1286
1286
  >= self.roughness_threshold
1287
1287
  ):
1288
1288
  mask_seg_id = X[:, self.columns.SEGMENT_ID_COLUMN] == seg_id
1289
- X[
1290
- mask_seg_id, self.columns.SEGMENT_ID_COLUMN
1291
- ] = DEFAULT_NO_SEGMENT
1289
+ X[mask_seg_id, self.columns.SEGMENT_ID_COLUMN] = (
1290
+ DEFAULT_NO_SEGMENT
1291
+ )
1292
1292
  # since we don't have a new segment
1293
1293
  seg_id -= 1
1294
1294
  else:
@@ -2027,6 +2027,11 @@ class PBM3C2:
2027
2027
  extract_segments=ExtractSegments(),
2028
2028
  classifier=ClassifierWrapper(),
2029
2029
  ):
2030
+
2031
+ logger.warning(
2032
+ f"This method is in experimental stage and undergoing active development."
2033
+ )
2034
+
2030
2035
  """
2031
2036
  :param per_point_computation:
2032
2037
  lowest local surface variation and PCA computation. (computes the normal vector as well)
@@ -65,8 +65,8 @@ PYBIND11_MODULE(_py4dgeo, m)
65
65
  epoch.def(py::init<EigenPointCloudRef>(), py::keep_alive<1, 2>());
66
66
 
67
67
  // We can directly access the point cloud and the kdtree
68
- epoch.def_readwrite("cloud", &Epoch::cloud);
69
- epoch.def_readwrite("kdtree", &Epoch::kdtree);
68
+ epoch.def_readwrite("_cloud", &Epoch::cloud);
69
+ epoch.def_readwrite("_kdtree", &Epoch::kdtree);
70
70
 
71
71
  // Pickling support for the Epoch class
72
72
  epoch.def(py::pickle(
@@ -127,12 +127,22 @@ PYBIND11_MODULE(_py4dgeo, m)
127
127
 
128
128
  kdtree.def(
129
129
  "nearest_neighbors",
130
- [](const KDTree& self, EigenPointCloudConstRef cloud) {
130
+ [](const KDTree& self, EigenPointCloudConstRef cloud, int k) {
131
131
  KDTree::NearestNeighborsDistanceResult result;
132
- self.nearest_neighbors_with_distances(cloud, result);
132
+ self.nearest_neighbors_with_distances(cloud, result, k);
133
133
 
134
- return std::make_tuple(as_pyarray(std::move(result.first)),
135
- as_pyarray(std::move(result.second)));
134
+ py::array_t<long int> indices_array(result.size());
135
+ py::array_t<double> distances_array(result.size());
136
+
137
+ auto indices_array_ptr = indices_array.mutable_data();
138
+ auto distances_array_ptr = distances_array.mutable_data();
139
+
140
+ for (size_t i = 0; i < result.size(); ++i) {
141
+ *indices_array_ptr++ = result[i].first[result[i].first.size() - 1];
142
+ *distances_array_ptr++ = result[i].second[result[i].second.size() - 1];
143
+ }
144
+
145
+ return std::make_pair(indices_array, distances_array);
136
146
  },
137
147
  "Find nearest neighbors for all points in a cloud!");
138
148
 
@@ -148,6 +158,63 @@ PYBIND11_MODULE(_py4dgeo, m)
148
158
  };
149
159
  });
150
160
 
161
+ // Segment point cloud into a supervoxels
162
+ m.def("segment_pc_in_supervoxels",
163
+ [](Epoch& epoch,
164
+ const KDTree& kdtree,
165
+ EigenNormalSetConstRef normals,
166
+ double resolution,
167
+ int k,
168
+ int minSVPvalue) {
169
+ std::vector<Supervoxel> supervoxels =
170
+ segment_pc(epoch, kdtree, normals, resolution, k, minSVPvalue);
171
+
172
+ py::list np_arrays_cloud;
173
+ py::list np_arrays_centroid;
174
+ py::list np_arrays_boundary_points;
175
+ py::list np_arrays_normals;
176
+
177
+ for (const auto& sv : supervoxels) {
178
+ // Convert Eigen::MatrixXd to a NumPy array
179
+ auto np_array_cloud = py::array_t<double>(
180
+ sv.cloud.rows() * sv.cloud.cols(), sv.cloud.data());
181
+ auto np_array_normals = py::array_t<double>(
182
+ sv.normals.rows() * sv.normals.cols(), sv.normals.data());
183
+ auto np_array_centroid =
184
+ py::array_t<double>(sv.centroid.size(), sv.centroid.data());
185
+ auto np_array_boundary_points = py::array_t<double>(
186
+ sv.boundary_points.rows() * sv.boundary_points.cols(),
187
+ sv.boundary_points.data());
188
+
189
+ // Reshape the arrays to their original shape
190
+ np_array_cloud.resize({ sv.cloud.rows(), sv.cloud.cols() });
191
+ np_array_normals.resize({ sv.normals.rows(), sv.normals.cols() });
192
+ np_array_centroid.resize({ sv.centroid.size() });
193
+ np_array_boundary_points.resize(
194
+ { sv.boundary_points.rows(), sv.boundary_points.cols() });
195
+
196
+ np_arrays_cloud.append(np_array_cloud);
197
+ np_arrays_normals.append(np_array_normals);
198
+ np_arrays_centroid.append(np_array_centroid);
199
+ np_arrays_boundary_points.append(np_array_boundary_points);
200
+ }
201
+
202
+ return std::make_tuple(np_arrays_cloud,
203
+ np_arrays_normals,
204
+ np_arrays_centroid,
205
+ np_arrays_boundary_points);
206
+ });
207
+
208
+ // Perform a transformation of a point cloud using Gauss-Newton method
209
+ m.def("fit_transform_GN",
210
+ [](EigenPointCloudConstRef cloud1,
211
+ EigenPointCloudConstRef cloud2,
212
+ EigenNormalSetConstRef normals) {
213
+ Eigen::Matrix4d transformation =
214
+ fit_transform_GN(cloud1, cloud2, normals);
215
+ return transformation;
216
+ });
217
+
151
218
  // The main distance computation function that is the main entry point of M3C2
152
219
  m.def(
153
220
  "compute_distances",
@@ -189,9 +256,27 @@ PYBIND11_MODULE(_py4dgeo, m)
189
256
  "The main M3C2 distance calculation algorithm");
190
257
 
191
258
  // Multiscale direction computation
192
- m.def("compute_multiscale_directions",
193
- &compute_multiscale_directions,
194
- "Compute M3C2 multiscale directions");
259
+ m.def(
260
+ "compute_multiscale_directions",
261
+ [](const Epoch& epoch,
262
+ EigenPointCloudConstRef corepoints,
263
+ const std::vector<double>& normal_radii,
264
+ EigenNormalSetConstRef orientation) {
265
+ EigenNormalSet result(corepoints.rows(), 3);
266
+ std::vector<double> used_radii;
267
+
268
+ compute_multiscale_directions(
269
+ epoch, corepoints, normal_radii, orientation, result, used_radii);
270
+
271
+ return std::make_tuple(std::move(result),
272
+ as_pyarray(std::move(used_radii)));
273
+ },
274
+ "Compute M3C2 multiscale directions");
275
+
276
+ // Corresponence distances computation
277
+ m.def("compute_correspondence_distances",
278
+ &compute_correspondence_distances,
279
+ "Compute correspondence distances");
195
280
 
196
281
  // Callback parameter structs
197
282
  py::class_<WorkingSetFinderParameters> ws_params(
@@ -362,7 +447,8 @@ PYBIND11_MODULE(_py4dgeo, m)
362
447
  m.def("transform_pointcloud_inplace",
363
448
  [](EigenPointCloudRef cloud,
364
449
  const py::array_t<double>& t,
365
- EigenPointCloudConstRef rp) {
450
+ EigenPointCloudConstRef rp,
451
+ EigenNormalSetRef normals) {
366
452
  Transformation trafo;
367
453
 
368
454
  auto r = t.unchecked<2>();
@@ -370,7 +456,7 @@ PYBIND11_MODULE(_py4dgeo, m)
370
456
  for (IndexType j = 0; j < 4; ++j)
371
457
  trafo(i, j) = r(i, j);
372
458
 
373
- transform_pointcloud_inplace(cloud, trafo, rp);
459
+ transform_pointcloud_inplace(cloud, trafo, rp, normals);
374
460
  });
375
461
 
376
462
  // The main algorithms for the spatiotemporal segmentations
py4dgeo/registration.py CHANGED
@@ -1,3 +1,6 @@
1
+ from py4dgeo.util import Py4DGeoError
2
+
3
+ from copy import deepcopy
1
4
  import dataclasses
2
5
  import numpy as np
3
6
 
@@ -80,20 +83,26 @@ def iterative_closest_point(
80
83
 
81
84
  # Make a copy of the cloud to be transformed.
82
85
  cloud = epoch.cloud.copy()
83
-
84
86
  prev_error = 0
85
87
 
86
88
  for _ in range(max_iterations):
87
- indices, distances = reference_epoch.kdtree.nearest_neighbors(cloud)
88
- # Calculate a transform and apply it
89
+ neighbor_arrays = np.asarray(reference_epoch.kdtree.nearest_neighbors(cloud, 1))
90
+ indices, distances = np.split(neighbor_arrays, 2, axis=0)
89
91
 
92
+ indices = np.squeeze(indices.astype(int))
93
+ distances = np.squeeze(distances)
94
+
95
+ # Calculate a transform and apply it
90
96
  T = _fit_transform(
91
97
  cloud, reference_epoch.cloud[indices, :], reduction_point=reduction_point
92
98
  )
93
- _py4dgeo.transform_pointcloud_inplace(cloud, T, reduction_point)
99
+ _py4dgeo.transform_pointcloud_inplace(
100
+ cloud, T, reduction_point, np.empty((1, 3))
101
+ )
94
102
 
95
103
  # Determine convergence
96
104
  mean_error = np.mean(np.sqrt(distances))
105
+
97
106
  if np.abs(prev_error - mean_error) < tolerance:
98
107
  break
99
108
  prev_error = mean_error
@@ -102,3 +111,364 @@ def iterative_closest_point(
102
111
  affine_transformation=_fit_transform(epoch.cloud, cloud),
103
112
  reduction_point=reduction_point,
104
113
  )
114
+
115
+
116
+ def point_to_plane_icp(
117
+ reference_epoch, epoch, max_iterations=50, tolerance=0.00001, reduction_point=None
118
+ ):
119
+ """Perform a point to plane Iterative Closest Point algorithm (ICP), based on Gauss-Newton method for computing the least squares solution
120
+
121
+ :param reference_epoch:
122
+ The reference epoch to match with. This epoch has to have calculated normals.
123
+ :type reference_epoch: py4dgeo.Epoch
124
+ :param epoch:
125
+ The epoch to be transformed to the reference epoch
126
+ :type epoch: py4dgeo.Epoch
127
+ :param max_iterations:
128
+ The maximum number of iterations to be performed in the ICP algorithm
129
+ :type max_iterations: int
130
+ :param tolerance:
131
+ The tolerance criterium used to terminate ICP iteration.
132
+ :type tolerance: float
133
+ :param reduction_point:
134
+ A translation vector to apply before applying rotation and scaling.
135
+ This is used to increase the numerical accuracy of transformation.
136
+ :type reduction_point: np.ndarray
137
+ """
138
+
139
+ from py4dgeo.epoch import Epoch
140
+
141
+ # Ensure that Epoch has calculated normals
142
+ if reference_epoch.normals is None:
143
+ raise Py4DGeoError(
144
+ "Normals for this Reference Epoch have not been calculated! Please use Epoch.calculate_normals or load externally calculated normals."
145
+ )
146
+
147
+ # Ensure that reference_epoch has its KDTree built
148
+ if reference_epoch.kdtree.leaf_parameter() == 0:
149
+ reference_epoch.build_kdtree()
150
+
151
+ # Apply the default for the registration point
152
+ if reduction_point is None:
153
+ reduction_point = np.array([0, 0, 0])
154
+
155
+ # Make a copy of the cloud to be transformed.
156
+ trans_epoch = epoch.copy()
157
+
158
+ prev_error = 0
159
+ for _ in range(max_iterations):
160
+ neighbor_arrays = np.asarray(
161
+ reference_epoch.kdtree.nearest_neighbors(trans_epoch.cloud, 1)
162
+ )
163
+ indices, distances = np.split(neighbor_arrays, 2, axis=0)
164
+
165
+ indices = np.squeeze(indices.astype(int))
166
+ distances = np.squeeze(distances)
167
+
168
+ # Calculate a transform and apply it
169
+ T = _py4dgeo.fit_transform_GN(
170
+ trans_epoch.cloud,
171
+ reference_epoch.cloud[indices, :],
172
+ reference_epoch.normals[indices, :],
173
+ )
174
+ trans_epoch.transform(
175
+ Transformation(affine_transformation=T, reduction_point=reduction_point)
176
+ )
177
+
178
+ # Determine convergence
179
+ mean_error = np.mean(np.sqrt(distances))
180
+ if np.abs(prev_error - mean_error) < tolerance:
181
+ break
182
+ prev_error = mean_error
183
+
184
+ return Transformation(
185
+ affine_transformation=_py4dgeo.fit_transform_GN(
186
+ epoch.cloud,
187
+ trans_epoch.cloud,
188
+ trans_epoch.normals,
189
+ ),
190
+ reduction_point=reduction_point,
191
+ )
192
+
193
+
194
+ def calculate_bounding_box(point_cloud):
195
+ """
196
+ Calculate the bounding box of a point cloud.
197
+
198
+ Parameters:
199
+ - point_cloud: NumPy array with shape (N, 3), where N is the number of points.
200
+
201
+ Returns:
202
+ - min_bound: 1D array representing the minimum coordinates of the bounding box.
203
+ - max_bound: 1D array representing the maximum coordinates of the bounding box.
204
+ """
205
+ min_bound = np.min(point_cloud, axis=0)
206
+ max_bound = np.max(point_cloud, axis=0)
207
+
208
+ return min_bound, max_bound
209
+
210
+
211
+ def calculate_bounding_box_change(
212
+ bounding_box_min, bounding_box_max, transformation_matrix
213
+ ):
214
+ """Calculate the change in kdtree bounding box corners after applying a transformation matrix.
215
+ Parameters:
216
+ - bounding_box_min: 1D array representing the minimum coordinates of the bounding box.
217
+ - bounding_box_max: 1D array representing the maximum coordinates of the bounding box.
218
+ - transformation_matrix: 2D array representing the transformation matrix.
219
+ Returns:
220
+ - max_change: The maximum change in the bounding box corners.
221
+ """
222
+
223
+ # Convert bounding box to homogeneous coordinates
224
+ bounding_box_min_homogeneous = np.concatenate((bounding_box_min, [1]))
225
+ bounding_box_max_homogeneous = np.concatenate((bounding_box_max, [1]))
226
+ bounding_box_min_homogeneous = np.reshape(bounding_box_min_homogeneous, (4, 1))
227
+ bounding_box_max_homogeneous = np.reshape(bounding_box_max_homogeneous, (4, 1))
228
+
229
+ # Calculate the change in bounding box corners
230
+ bb_c2p1 = np.dot(transformation_matrix, bounding_box_min_homogeneous)
231
+ bb_c2p2 = np.dot(transformation_matrix, bounding_box_max_homogeneous)
232
+
233
+ dif_bb_pmin = np.sum(np.abs(bb_c2p1[:3] - bounding_box_min_homogeneous[:3]))
234
+ dif_bb_pmax = np.sum(np.abs(bb_c2p2[:3] - bounding_box_max_homogeneous[:3]))
235
+
236
+ return max(dif_bb_pmin, dif_bb_pmax)
237
+
238
+
239
+ def calculate_dis_threshold(epoch1, epoch2):
240
+ """Calculate the distance threshold for the next iteration of the registration method
241
+ Parameters:
242
+ - epoch1: The reference epoch.
243
+ - epoch2: Stable points of epoch.
244
+ Returns:
245
+ - dis_threshold: The distance threshold.
246
+ """
247
+ neighbor_arrays = np.asarray(epoch1.kdtree.nearest_neighbors(epoch2.cloud, 1))
248
+ indices, distances = np.split(neighbor_arrays, 2, axis=0)
249
+ distances = np.squeeze(distances)
250
+
251
+ if indices.size > 0:
252
+ # Calculate mean distance
253
+ mean_dis = np.mean(np.sqrt(distances))
254
+
255
+ # Calculate standard deviation
256
+ std_dis = np.sqrt(np.mean((mean_dis - distances) ** 2))
257
+
258
+ dis_threshold = mean_dis + 1.0 * std_dis
259
+
260
+ return dis_threshold
261
+
262
+
263
+ def icp_with_stable_areas(
264
+ reference_epoch,
265
+ epoch,
266
+ initial_distance_threshold,
267
+ level_of_detection,
268
+ reference_supervoxel_resolution,
269
+ supervoxel_resolution,
270
+ min_svp_num=10,
271
+ reduction_point=None,
272
+ ):
273
+ """Perform a registration method
274
+
275
+ :param reference_epoch:
276
+ The reference epoch to match with. This epoch has to have calculated normals.
277
+ :type reference_epoch: py4dgeo.Epoch
278
+ :param epoch:
279
+ The epoch to be transformed to the reference epoch
280
+ :type epoch: py4dgeo.Epoch
281
+ :param initial_distance_threshold:
282
+ The upper boundary of the distance threshold in the iteration. It can be (1) an empirical value manually set by the user according to the approximate accuracy of coarse registration,
283
+ or (2) calculated by the mean and standard of the nearest neighbor distances of all points.
284
+ :type initial_distance_threshold: float
285
+ :param level_of_detection:
286
+ The lower boundary (minimum) of the distance threshold in the iteration.
287
+ It can be (1) an empirical value manually set by the user according to the approximate uncertainty of laser scanning measurements in different scanning configurations and scenarios
288
+ (e.g., 1 cm for TLS point clouds in short distance and 4 cm in long distance, 8 cm for ALS point clouds, etc.),
289
+ or (2) calculated by estimating the standard deviation from local modeling (e.g., using the level of detection in M3C2 or M3C2-EP calculations).
290
+ :type level_of_detection: float
291
+ :param reference_supervoxel_resolution:
292
+ The approximate size of generated supervoxels for the reference epoch.
293
+ It can be (1) an empirical value manually set by the user according to different surface geometries and scanning distance (e.g., 2-10 cm for indoor scenes, 1-3 m for landslide surface),
294
+ or (2) calculated by 10-20 times the average point spacing (original resolution of point clouds). In both cases, the number of points in each supervoxel should be at least 10 (i.e., minSVPnum = 10).
295
+ :type reference_supervoxel_resolution: float
296
+ :param supervoxel_resolution:
297
+ The same as `reference_supervoxel_resolution`, but for a different epoch.
298
+ :type supervoxel_resolution: float
299
+ :param min_svp_num:
300
+ Minimum number of points for supervoxels to be taken into account in further calculations.
301
+ :type min_svp_num: int
302
+ :param reduction_point:
303
+ A translation vector to apply before applying rotation and scaling.
304
+ This is used to increase the numerical accuracy of transformation.
305
+ :type reduction_point: np.ndarray
306
+
307
+ """
308
+
309
+ from py4dgeo.epoch import as_epoch
310
+
311
+ # Ensure that reference_epoch has its KDTree build
312
+ if reference_epoch.kdtree.leaf_parameter() == 0:
313
+ reference_epoch.build_kdtree()
314
+
315
+ # Ensure that epoch has its KDTree build
316
+ if epoch.kdtree.leaf_parameter() == 0:
317
+ epoch.build_kdtree()
318
+
319
+ # Ensure that Epoch has calculated normals
320
+ # Ensure that Epoch has calculated normals
321
+ if reference_epoch.normals is None:
322
+ raise Py4DGeoError(
323
+ "Normals for this Reference Epoch have not been calculated! Please use Epoch.calculate_normals or load externally calculated normals."
324
+ )
325
+
326
+ # Ensure that Epoch has calculated normals
327
+
328
+ # Ensure that Epoch has calculated normals
329
+ if epoch.normals is None:
330
+ raise Py4DGeoError(
331
+ "Normals for this Epoch have not been calculated! Please use Epoch.calculate_normals or load externally calculated normals."
332
+ )
333
+
334
+ # Apply the default for the registration point
335
+ if reduction_point is None:
336
+ reduction_point = np.array([0, 0, 0])
337
+
338
+ if initial_distance_threshold <= level_of_detection:
339
+ initial_distance_threshold = level_of_detection
340
+
341
+ transMatFinal = np.identity(4) # Identity matrix for initial transMatFinal
342
+ stage3 = stage4 = 0
343
+ epoch_copy = epoch.copy() # Create copy of epoch for applying transformation
344
+
345
+ k = 50 # Number of nearest neighbors to consider in supervoxel segmentation
346
+
347
+ clouds_pc1, _, centroids_pc1, _ = _py4dgeo.segment_pc_in_supervoxels(
348
+ reference_epoch,
349
+ reference_epoch.kdtree,
350
+ reference_epoch.normals,
351
+ reference_supervoxel_resolution,
352
+ k,
353
+ min_svp_num,
354
+ )
355
+ (
356
+ clouds_pc2,
357
+ normals2,
358
+ centroids_pc2,
359
+ boundary_points_pc2,
360
+ ) = _py4dgeo.segment_pc_in_supervoxels(
361
+ epoch, epoch.kdtree, epoch.normals, supervoxel_resolution, k, min_svp_num
362
+ )
363
+
364
+ centroids_pc1 = as_epoch(np.array(centroids_pc1))
365
+ centroids_pc1.build_kdtree()
366
+ centroids_pc2 = np.array(centroids_pc2)
367
+ boundary_points_pc2 = np.concatenate(boundary_points_pc2, axis=0)
368
+
369
+ _, reference_distances = np.split(
370
+ np.asarray(reference_epoch.kdtree.nearest_neighbors(reference_epoch.cloud, 2)),
371
+ 2,
372
+ axis=0,
373
+ )
374
+ basicRes = np.mean(np.squeeze(reference_distances))
375
+ dis_threshold = initial_distance_threshold
376
+
377
+ while stage4 == 0:
378
+ cor_dist_ct = _py4dgeo.compute_correspondence_distances(
379
+ centroids_pc1, centroids_pc2, clouds_pc1, len(reference_epoch.cloud)
380
+ )
381
+ # Calculation BP2-CT1
382
+ cor_dist_bp = _py4dgeo.compute_correspondence_distances(
383
+ centroids_pc1,
384
+ boundary_points_pc2,
385
+ clouds_pc1,
386
+ len(reference_epoch.cloud),
387
+ )
388
+ # calculation BP2- CP1
389
+ cor_dist_pc = _py4dgeo.compute_correspondence_distances(
390
+ reference_epoch,
391
+ boundary_points_pc2,
392
+ clouds_pc1,
393
+ len(reference_epoch.cloud),
394
+ )
395
+
396
+ stablePC2 = [] # Stable supervoxels
397
+ normPC2 = [] # Stable supervoxel's normals
398
+
399
+ dt_point = dis_threshold + 2 * basicRes
400
+
401
+ for i in range(len(centroids_pc2)):
402
+ if cor_dist_ct[i] < dis_threshold and all(
403
+ cor_dist_bp[j + 6 * i] < dis_threshold
404
+ and cor_dist_pc[j + 6 * i] < dt_point
405
+ for j in range(6)
406
+ ):
407
+ stablePC2.append(clouds_pc2[i])
408
+ normPC2.append(normals2[i])
409
+
410
+ # Handle empty stablePC2
411
+ if len(stablePC2) == 0:
412
+ raise Py4DGeoError(
413
+ "No stable supervoxels found! Please adjust the parameters."
414
+ )
415
+
416
+ stablePC2 = np.vstack(stablePC2)
417
+ stablePC2 = as_epoch(stablePC2)
418
+ normPC2 = np.vstack(normPC2)
419
+ stablePC2.normals_attachment(normPC2)
420
+ trans_mat_cur_obj = point_to_plane_icp(
421
+ reference_epoch,
422
+ stablePC2,
423
+ max_iterations=50,
424
+ tolerance=0.00001,
425
+ reduction_point=reduction_point,
426
+ )
427
+
428
+ trans_mat_cur = trans_mat_cur_obj.affine_transformation
429
+
430
+ # BB
431
+ initial_min_bound, initial_max_bound = calculate_bounding_box(epoch_copy.cloud)
432
+ max_bb_change = calculate_bounding_box_change(
433
+ initial_min_bound, initial_max_bound, trans_mat_cur
434
+ )
435
+ # update DT
436
+ if stage3 == 0 and max_bb_change < 2 * level_of_detection:
437
+ stage3 = 1
438
+ elif dis_threshold == level_of_detection:
439
+ stage4 = 1
440
+
441
+ if stage3 == 0:
442
+ dis_threshold = calculate_dis_threshold(reference_epoch, stablePC2)
443
+ if dis_threshold <= level_of_detection:
444
+ dis_threshold = level_of_detection
445
+
446
+ if stage3 == 1 and stage4 == 0:
447
+ dis_threshold = 0.8 * dis_threshold
448
+ if dis_threshold <= level_of_detection:
449
+ dis_threshold = level_of_detection
450
+
451
+ # update values and apply changes
452
+ # Apply the transformation to the epoch
453
+ epoch_copy.transform(
454
+ Transformation(
455
+ affine_transformation=trans_mat_cur, reduction_point=reduction_point
456
+ )
457
+ )
458
+ _py4dgeo.transform_pointcloud_inplace(
459
+ centroids_pc2, trans_mat_cur, reduction_point, np.empty((1, 3))
460
+ )
461
+ _py4dgeo.transform_pointcloud_inplace(
462
+ boundary_points_pc2, trans_mat_cur, reduction_point, np.empty((1, 3))
463
+ )
464
+ for i in range(len(clouds_pc2)):
465
+ _py4dgeo.transform_pointcloud_inplace(
466
+ clouds_pc2[i], trans_mat_cur, reduction_point, np.empty((1, 3))
467
+ )
468
+
469
+ transMatFinal = trans_mat_cur @ transMatFinal
470
+
471
+ return Transformation(
472
+ affine_transformation=transMatFinal,
473
+ reduction_point=reduction_point,
474
+ )
py4dgeo/segmentation.py CHANGED
@@ -14,7 +14,6 @@ import pickle
14
14
  import seaborn
15
15
  import tempfile
16
16
  import zipfile
17
-
18
17
  import _py4dgeo
19
18
 
20
19
 
@@ -557,7 +556,7 @@ class SpatiotemporalAnalysis:
557
556
 
558
557
  zf.write(objectsfile, arcname="objects.pickle")
559
558
 
560
- def invalidate_results(self, seeds=True, objects=True, smoothed_distances=True):
559
+ def invalidate_results(self, seeds=True, objects=True, smoothed_distances=False):
561
560
  """Invalidate (and remove) calculated results
562
561
 
563
562
  This is automatically called when new epochs are added or when
@@ -697,10 +696,24 @@ class RegionGrowingAlgorithmBase:
697
696
  analysis.invalidate_results()
698
697
 
699
698
  # Return pre-calculated objects if they are available
700
- precalculated = analysis.objects
699
+ # precalculated = analysis.objects
700
+ # if precalculated is not None:
701
+ # logger.info("Reusing objects by change stored in analysis object")
702
+ # return precalculated
703
+
704
+ # Check if there are pre-calculated objects.
705
+ # If so, create objects list from these and continue growing objects, taking into consideration objects that are already grown.
706
+ # if not initiate new empty objects list
707
+ precalculated = analysis.objects # TODO: do not assign to new object
701
708
  if precalculated is not None:
702
709
  logger.info("Reusing objects by change stored in analysis object")
703
- return precalculated
710
+ objects = (
711
+ precalculated.copy()
712
+ ) # test if .copy() solves memory problem, or deepcopy?
713
+ else:
714
+ objects = (
715
+ []
716
+ ) # TODO: test initializing this in the analysis class, see if it crashes instantly
704
717
 
705
718
  # Get corepoints from M3C2 class and build a KDTree on them
706
719
  corepoints = as_epoch(analysis.corepoints)
@@ -720,11 +733,34 @@ class RegionGrowingAlgorithmBase:
720
733
  analysis.seeds = seeds
721
734
  else:
722
735
  logger.info("Reusing seed candidates stored in analysis object")
723
-
724
- objects = []
736
+ # write the number of seeds to a separate text file if self.write_nr_seeds is True
737
+ if self.write_nr_seeds:
738
+ with open("number_of_seeds.txt", "w") as f:
739
+ f.write(str(len(seeds)))
725
740
 
726
741
  # Iterate over the seeds to maybe turn them into objects
727
- for i, seed in enumerate(seeds):
742
+ for i, seed in enumerate(
743
+ seeds
744
+ ): # [self.resume_from_seed-1:]): # starting seed ranked at the `resume_from_seed` variable (representing 1 for index 0)
745
+ # or to keep within the same index range when resuming from seed:
746
+ if i < (
747
+ self.resume_from_seed - 1
748
+ ): # resume from index 0 when `resume_from_seed` == 1
749
+ continue
750
+ if i >= (self.stop_at_seed - 1): # stop at index 0 when `stop_at_seed` == 1
751
+ break
752
+
753
+ # save objects to analysis object when at index `intermediate_saving`
754
+ if (
755
+ (self.intermediate_saving)
756
+ and ((i % self.intermediate_saving) == 0)
757
+ and (i != 0)
758
+ ):
759
+ with logger_context(
760
+ f"Intermediate saving of first {len(objects)} objects, grown from first {i+1}/{len(seeds)} seeds"
761
+ ):
762
+ analysis.objects = objects # This assigns itself to itself
763
+
728
764
  # Check all already calculated objects whether they overlap with this seed.
729
765
  found = False
730
766
  for obj in objects:
@@ -762,7 +798,9 @@ class RegionGrowingAlgorithmBase:
762
798
 
763
799
  # If the returned object has 0 indices, the min_segments threshold was violated
764
800
  if objdata.indices_distances:
765
- obj = ObjectByChange(objdata, seed, analysis)
801
+ obj = ObjectByChange(
802
+ objdata, seed, analysis
803
+ ) # TODO: check, does it copy the whole analysis object when initializing
766
804
  if self.filter_objects(obj):
767
805
  objects.append(obj)
768
806
 
@@ -775,7 +813,7 @@ class RegionGrowingAlgorithmBase:
775
813
  # Store the results in the analysis object
776
814
  analysis.objects = objects
777
815
 
778
- # Potentially remove objects from memory # TODO Why do we remove these?
816
+ # Potentially remove objects from memory
779
817
  del analysis.smoothed_distances
780
818
  del analysis.distances
781
819
 
@@ -793,6 +831,11 @@ class RegionGrowingAlgorithm(RegionGrowingAlgorithmBase):
793
831
  window_penalty=1.0,
794
832
  minperiod=24,
795
833
  height_threshold=0.0,
834
+ use_unfinished=True,
835
+ intermediate_saving=0,
836
+ resume_from_seed=0,
837
+ stop_at_seed=np.inf,
838
+ write_nr_seeds=False,
796
839
  **kwargs,
797
840
  ):
798
841
  """Construct the 4D-OBC algorithm.
@@ -836,7 +879,27 @@ class RegionGrowingAlgorithm(RegionGrowingAlgorithmBase):
836
879
  as unsigned difference between magnitude (i.e. distance) at start epoch and peak magnitude.
837
880
  The default is 0.0, in which case all detected changes are used as seed candidates.
838
881
  :type height_threshold: float
839
-
882
+ :param use_unfinished:
883
+ If False, seed candidates that are not finished by the end of the time series are not considered in further
884
+ analysis. The default is True, in which case unfinished seed candidates are regarded as seeds for region growing.
885
+ :type use_unfinished: bool
886
+ :param intermediate_saving:
887
+ Parameter that determines after how many considered seeds the resulting list of 4D-OBCs is saved to the SpatiotemporalAnalysis object.
888
+ This is to ensure that, if the algorithm is terminated unexpectedly, not all results are lost. If set to 0, no intermediate saving is done.
889
+ :type intermediate_saving: int
890
+ :param resume_from_seed:
891
+ Parameter specifying from which seed index the region growing algorithm must resume. If zero, all seeds are considered, starting from the highest-ranked seed.
892
+ Default is 0.
893
+ :type resume_from_seed: int
894
+ :param stop_at_seed:
895
+ Parameter specifying at which seed to stop region growing and terminate the run function.
896
+ Default is np.inf, meaning all seeds are considered.
897
+ :type stop_at_seed: int
898
+ :param write_nr_seeds:
899
+ If True, after seed detection, a text file is written in the working directory containing the total number of detected seeds.
900
+ This can be used to split up the consecutive 4D-OBC segmentation into different subsets.
901
+ Default is False, meaning no txt file is written.
902
+ :type write_nr_seeds: bool
840
903
  """
841
904
 
842
905
  # Initialize base class
@@ -851,6 +914,11 @@ class RegionGrowingAlgorithm(RegionGrowingAlgorithmBase):
851
914
  self.window_penalty = window_penalty
852
915
  self.minperiod = minperiod
853
916
  self.height_threshold = height_threshold
917
+ self.use_unfinished = use_unfinished
918
+ self.intermediate_saving = intermediate_saving
919
+ self.resume_from_seed = resume_from_seed
920
+ self.stop_at_seed = stop_at_seed
921
+ self.write_nr_seeds = write_nr_seeds
854
922
 
855
923
  def find_seedpoints(self):
856
924
  """Calculate seedpoints for the region growing algorithm"""
@@ -941,8 +1009,14 @@ class RegionGrowingAlgorithm(RegionGrowingAlgorithmBase):
941
1009
 
942
1010
  # Check whether the volume started decreasing
943
1011
  if previous_volume > volume:
944
- # Only add seed if larger than the minimum period
945
- if target_idx - start_idx >= self.minperiod:
1012
+ # Only add seed if larger than the minimum period and height of the change form larger than threshold
1013
+ if (target_idx - start_idx >= self.minperiod) and (
1014
+ np.abs(
1015
+ np.max(used_timeseries[start_idx : target_idx + 1])
1016
+ - np.min(used_timeseries[start_idx : target_idx + 1])
1017
+ )
1018
+ >= self.height_threshold
1019
+ ):
946
1020
  corepoint_seeds.append(
947
1021
  RegionGrowingSeed(i, start_idx, target_idx)
948
1022
  )
@@ -953,7 +1027,7 @@ class RegionGrowingAlgorithm(RegionGrowingAlgorithmBase):
953
1027
  # This causes a seed to always be detected if the volume doesn't decrease before present
954
1028
  # Useful when used in an online setting, can be filtered before region growing
955
1029
  # Only if the last epoch is reached we use the segment as seed
956
- if target_idx == timeseries.shape[0] - 1:
1030
+ if (target_idx == timeseries.shape[0] - 1) and self.use_unfinished:
957
1031
  # We reached the present and add a seed based on it
958
1032
  corepoint_seeds.append(
959
1033
  RegionGrowingSeed(i, start_idx, timeseries.shape[0] - 1)
@@ -973,6 +1047,10 @@ class RegionGrowingAlgorithm(RegionGrowingAlgorithmBase):
973
1047
  neighbors = self.analysis.corepoints.kdtree.radius_search(
974
1048
  self.analysis.corepoints.cloud[seed.index, :], self.neighborhood_radius
975
1049
  )
1050
+ # if no neighbors are found make sure the algorithm continues its search but with a large dissimilarity
1051
+ if len(neighbors) < 2:
1052
+ return 9999999.0 # return very large number? or delete the seed point, but then also delete from the seeds list
1053
+
976
1054
  similarities = []
977
1055
  for n in neighbors:
978
1056
  data = _py4dgeo.TimeseriesDistanceFunctionData(
py4dgeo/util.py CHANGED
@@ -14,8 +14,8 @@ import _py4dgeo
14
14
 
15
15
 
16
16
  # The current data archive URL
17
- TEST_DATA_ARCHIVE = "https://github.com/3dgeo-heidelberg/py4dgeo-test-data/releases/download/2023-09-08/data.tar.gz"
18
- TEST_DATA_CHECKSUM = "88bd0663c6b4d7edad4561e5a7cca929ef70d6c2a67bfffd3a863c6105ba452c"
17
+ TEST_DATA_ARCHIVE = "https://github.com/3dgeo-heidelberg/py4dgeo-test-data/releases/download/2024-06-28/data.tar.gz"
18
+ TEST_DATA_CHECKSUM = "5ee51a43b008181b829113d8b967cdf519eae4ac37a3301f1eaf53d15d3016cc"
19
19
 
20
20
  # Read the version from package metadata
21
21
  __version__ = metadata.version(__package__)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: py4dgeo
3
- Version: 0.6.0
3
+ Version: 0.7.0
4
4
  Summary: Library for change detection in 4D point cloud data
5
5
  Maintainer-Email: Dominic Kempf <ssc@iwr.uni-heidelberg.de>
6
6
  License: Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
@@ -26,8 +26,6 @@ Requires-Dist: scikit-learn
26
26
  Requires-Dist: vedo
27
27
  Requires-Dist: xdg
28
28
  Description-Content-Type: text/markdown
29
- License-File: LICENSE.md
30
- License-File: COPYING.md
31
29
 
32
30
  # Welcome to py4dgeo
33
31
 
@@ -53,28 +51,27 @@ Below, you find a list of [provided methods](#methods-provided-by-py4dgeo).
53
51
 
54
52
  ## 🔨 Methods provided by py4dgeo
55
53
 
56
- * **M3C2 algorithm** ([Lague et al., 2013](#-literature)) for bitemporal point cloud distance computation. The concept and algorithm is explained [in this tutorial by James Dietrich](https://youtu.be/xJql7h8M2_o), including usage in the graphical software [CloudCompare](www.cloudcompare.org).
54
+ * **M3C2 algorithm** ([Lague et al., 2013](#-literature)) for bitemporal point cloud distance computation.
57
55
 
58
- * **M3C2-EP** (M3C2-EP; [Winiwarter et al., 2021](#-literature)) for statistical signal-noise separation in change analysis through error propagation. The concept and method are explained in full detail in the related paper.
56
+ * **M3C2-EP** (M3C2-EP; [Winiwarter et al., 2021](#-literature)) for statistical signal-noise separation in change analysis through error propagation.
59
57
 
60
- * **4D objects-by-change** (4D-OBC; [Anders et al., 2021](#-literature)) for time series-based extraction of surface activities *[under active development]*. The concept and method are explained in this scientific talk:
61
- <a href="https://youtu.be/JxX3veMbMAI" target="_blank"><img src="https://github.com/3dgeo-heidelberg/py4dgeo/blob/main/doc/img/thumb_youtube_anders_isprs2021.png?raw=true" alt="" width="400" /></a>
58
+ * **4D objects-by-change** (4D-OBC; [Anders et al., 2021](#-literature)) for time series-based extraction of surface activities.
62
59
 
63
- * **Correspondence-driven plane-based M3C2** ([Zahs et al., 2022](#-literature)) for lower uncertainty in 3D topographic change quantification. The concept and method are explained in this scientific talk:
64
- <a href="https://youtu.be/5pjkpajsRNU" target="_blank"><img src="https://github.com/3dgeo-heidelberg/py4dgeo/blob/main/doc/img/thumb_youtube_zahs_isprs2022.png?raw=true" alt="" width="400" /></a>
60
+ * **Correspondence-driven plane-based M3C2** ([Zahs et al., 2022](#-literature)) for lower uncertainty in 3D topographic change quantification *[under active development]*.
65
61
 
66
- * **Point cloud registration**: Py4dgeo supports to calculate and apply affine transformations to point clouds using a standard ICP implementations. More ICP methods are currently being implemented - stay tuned!
62
+ * **Point cloud registration using standard ICP** by calculating and applying affine transformations to point clouds using a standard ICP implementation.
67
63
 
64
+ * **Point cloud registration with automatic determination of stable areas** ([Yang et al., 2022](https://doi.org/10.1515/jag-2022-0031)) for multitemporal change detection.
68
65
 
69
66
  ## 🎮 Examples
70
67
 
71
68
  ### Demo notebooks using methods provided by py4dgeo
72
- | | |
73
- |------------------------------------------------------------------------------------------------------------------------------------------------------|---|
74
- | [![Example 1](img/m3c2_change_analysis_thumbnail.png)](https://github.com/3dgeo-heidelberg/py4dgeo/blob/main/demo/m3c2-change_analysis.ipynb) | [![Example 2](img/m3c2ep_change_analysis_thumbnail.png)](https://github.com/3dgeo-heidelberg/py4dgeo/blob/main/demo/m3c2ep-change_analysis.ipynb) |
75
- | [![Example 3](img/4dobc_extraction_thumbnail.png)](https://github.com/3dgeo-heidelberg/py4dgeo/blob/main/demo/4dobc-change_analysis.ipynb) | [![Example 4](img/kmeans_clustering_thumbnail.png)](https://nbviewer.org/github/3dgeo-heidelberg/py4dgeo/blob/main/demo/kmeans_clustering_of_time_series.ipynb) |
76
- | [![Example 5](img/pca_thumbnail.png)](https://nbviewer.org/github/3dgeo-heidelberg/py4dgeo/blob/main/demo/registration_standard_ICP.ipynb) | [![Example 6](img/standard_icp_thumbnail.png)](https://nbviewer.org/github/3dgeo-heidelberg/py4dgeo/blob/main/demo/registration_standard_ICP.ipynb) |
77
- | [![Example 7](img/cd-pb-m3c2_change_analysis_thumbnail.png)](https://nbviewer.org/github/3dgeo-heidelberg/py4dgeo/blob/main/jupyter/pbm3c2.ipynb)
69
+ | | |
70
+ |--------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------|
71
+ | [![Example 1](img/m3c2_change_analysis_thumbnail.png)](https://nbviewer.org/github/3dgeo-heidelberg/py4dgeo/blob/main/demo/m3c2-change_analysis.ipynb) | [![Example 2](img/m3c2ep_change_analysis_thumbnail.png)](https://nbviewer.org/github/3dgeo-heidelberg/py4dgeo/blob/main/demo/m3c2ep_change_analysis.ipynb) |
72
+ | [![Example 3](img/4dobc_extraction_thumbnail.png)](https://nbviewer.org/github/3dgeo-heidelberg/py4dgeo/blob/main/demo/4dobc-change_analysis.ipynb) | [![Example 4](img/kmeans_clustering_thumbnail.png)](https://nbviewer.org/github/3dgeo-heidelberg/py4dgeo/blob/main/demo/kmeans_clustering_of_time_series.ipynb) |
73
+ | [![Example 5](img/pca_thumbnail.png)](https://nbviewer.org/github/3dgeo-heidelberg/py4dgeo/blob/main/demo/principal_component_analysis_of_time_series.ipynb) | [![Example 6](img/pbm3c2_thumbnail.png)](https://nbviewer.org/github/3dgeo-heidelberg/py4dgeo/blob/main/jupyter/pbm3c2.ipynb) |
74
+ | [![Example 7](img/standard_icp_thumbnail.png)](https://nbviewer.org/github/3dgeo-heidelberg/py4dgeo/blob/main/demo/registration_standard_ICP.ipynb) | |
78
75
 
79
76
  ## 💻 Installation
80
77
 
@@ -82,7 +79,7 @@ Below, you find a list of [provided methods](#methods-provided-by-py4dgeo).
82
79
 
83
80
  Using py4dgeo requires the following software installed:
84
81
 
85
- * 64-bit Python `>= 3.8` (32-bit installations might cause trouble during installation of dependencies)
82
+ * 64-bit Python `>= 3.9` (32-bit installations might cause trouble during installation of dependencies)
86
83
 
87
84
  In order to build the package from source, the following tools are also needed.
88
85
 
@@ -0,0 +1,20 @@
1
+ _py4dgeo.cp39-win_amd64.pyd,sha256=zaN5NSVv7ZV0_Qp1FqcH6Jp5ysJS93HfsC-Yf4Muyd0,526848
2
+ py4dgeo/__init__.py,sha256=T_hK47cJOktmTxJrC03Ou-JGcdhs0IEKBcPXjUT4wJ8,767
3
+ py4dgeo/cloudcompare.py,sha256=hCe0YKZit8XVe98DsMKeUp-m-F2-avwicgS-IRjL4EQ,1111
4
+ py4dgeo/epoch.py,sha256=8Q9NTxb4OoiJx4IGESDXaDe4uwcgksofiMQLChsnndI,29914
5
+ py4dgeo/fallback.py,sha256=dZltN-wUSMq5GYnUgCz157fh_H5gNf2MF2SdcntDQmI,5233
6
+ py4dgeo/logger.py,sha256=5J7DenNkvq5GMOF_Vd2kHLHGP7os1u4EFBMbyGCAJJY,2314
7
+ py4dgeo/m3c2.py,sha256=I-T2rnsHgZtjOcN0ONmB0UB3TlaW5je4-S0lgxhNvjo,8475
8
+ py4dgeo/m3c2ep.py,sha256=vqudKR_QcV7z9bYMbGssVyJ2d4q5shgFhRue_WErJQ0,29150
9
+ py4dgeo/pbm3c2.py,sha256=Ia7yWym0M72NNz1L-N5WhDA_ZGA_YmWmee80Dvdjgqo,146200
10
+ py4dgeo/py4dgeo_python.cpp,sha256=J_csxFp1FF7nWHh8_z_BTJQRKbG4oPyRrrccJjoW4zw,18960
11
+ py4dgeo/registration.py,sha256=aUdEas772buy31PVv-hA5Y3v-jGnnGeJB8Bo56eV4zg,17640
12
+ py4dgeo/segmentation.py,sha256=pzSHuWyfHfiTDpmfy2onL3XTE_9dvLz7faVr0ddV0JI,53556
13
+ py4dgeo/UpdateableZipFile.py,sha256=aZVdQgAc_M-EWFDIEVukgrYQUtEb5fRoRMCVxZqpggc,2770
14
+ py4dgeo/util.py,sha256=dB27r6UAX1V4zy-bj-TbNxtjtwebvU7T1BCd3EIj59k,8553
15
+ py4dgeo-0.7.0.dist-info/METADATA,sha256=jhvHo83amSpEvxj_2hXuIsBJWb1ef_ni04l70Xn9I9Q,12290
16
+ py4dgeo-0.7.0.dist-info/WHEEL,sha256=s-IzttuPuPJp8zDv-jm-zGvTOCxdFCvjX9dIQ9zqnM8,104
17
+ py4dgeo-0.7.0.dist-info/entry_points.txt,sha256=S8EHFVRD4deFJ_N6ZWst9v_ukH5lGnZY-f8NHjYoIfk,83
18
+ py4dgeo-0.7.0.dist-info/licenses/COPYING.md,sha256=ZetvO_BrdyO2DkROtlUcvpd6rl1M8Ak69vyDyePCZN0,1330
19
+ py4dgeo-0.7.0.dist-info/licenses/LICENSE.md,sha256=Rza103klOvpFdEr8ed20dZErIT6Tm998uX2ai29wDl8,1028
20
+ py4dgeo-0.7.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: scikit-build-core 0.7.0
2
+ Generator: scikit-build-core 0.10.7
3
3
  Root-Is-Purelib: false
4
4
  Tag: cp39-cp39-win_amd64
5
5
 
@@ -1,20 +0,0 @@
1
- _py4dgeo.cp39-win_amd64.pyd,sha256=wnuHtpyKWkLDhKxEeSvSNj9hf0Js-v8-eLsQxguMQQE,437760
2
- py4dgeo/__init__.py,sha256=SUIe73kbzU1eLUJXUAxNvFMroy-K6PoMR-Ox_RyDC7U,703
3
- py4dgeo/cloudcompare.py,sha256=R4IDWBu13RilIjgXVClE7sFsGhs0TXiJj-5IPAr9AMk,1157
4
- py4dgeo/epoch.py,sha256=h9XqOwRgVFbebn12bXYTSnrkp3M25NNPV32Jq8Wm2Jg,27772
5
- py4dgeo/fallback.py,sha256=jD5OFPrFAbTMJpXTx-gRgfcwCBK0F6cLXcxraQ7WL1I,5230
6
- py4dgeo/logger.py,sha256=5J7DenNkvq5GMOF_Vd2kHLHGP7os1u4EFBMbyGCAJJY,2314
7
- py4dgeo/m3c2.py,sha256=zgueVFPDGhnoUuAvtEwmBXgHOzTsDhkjde1SZBnllww,7658
8
- py4dgeo/m3c2ep.py,sha256=_A7vH2tEQ-Xnx88clhywKkraSrJAYL_aVNw44oFgn20,29122
9
- py4dgeo/pbm3c2.py,sha256=nrdqYvxUnENgJj9wR1D9C4uqsyzCNJ_2GVrP-CjNgEM,146070
10
- py4dgeo/py4dgeo_python.cpp,sha256=5XRjetjQsVrz7Xj8DpbwyIH7P7XTf73PfoQHWEfSxac,15371
11
- py4dgeo/registration.py,sha256=SLYtg8ag6mtMpdmosiOpdvpZC5ykJnLrgU6-kVAixHU,3128
12
- py4dgeo/segmentation.py,sha256=MS3jETT34QVs0e87ncU5ZkitImsrZV080PJE0kTfKsk,48878
13
- py4dgeo/UpdateableZipFile.py,sha256=aZVdQgAc_M-EWFDIEVukgrYQUtEb5fRoRMCVxZqpggc,2770
14
- py4dgeo/util.py,sha256=BzyY5TxnQ4uDEtHJlgR6mzXpcy4CRdm_2Ywj0BquwCs,8553
15
- py4dgeo-0.6.0.dist-info/METADATA,sha256=17eGeRumqHKC35ug_w5seIkm2E4bAX4UnpZpOEQBbGQ,12586
16
- py4dgeo-0.6.0.dist-info/WHEEL,sha256=8M3tCeSbuIPqzNDALzP1Gqz5ajcvQ7Kh5mc0IA65Zn4,103
17
- py4dgeo-0.6.0.dist-info/entry_points.txt,sha256=S8EHFVRD4deFJ_N6ZWst9v_ukH5lGnZY-f8NHjYoIfk,83
18
- py4dgeo-0.6.0.dist-info/licenses/LICENSE.md,sha256=Rza103klOvpFdEr8ed20dZErIT6Tm998uX2ai29wDl8,1028
19
- py4dgeo-0.6.0.dist-info/licenses/COPYING.md,sha256=ZetvO_BrdyO2DkROtlUcvpd6rl1M8Ak69vyDyePCZN0,1330
20
- py4dgeo-0.6.0.dist-info/RECORD,,