py4dgeo 1.0.0__cp314-cp314t-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
py4dgeo/pbm3c2.py ADDED
@@ -0,0 +1,694 @@
+import numpy as np
+import pandas as pd
+from sklearn.ensemble import RandomForestClassifier
+from scipy.spatial import cKDTree
+from py4dgeo.util import Py4DGeoError
+from py4dgeo.epoch import Epoch
+import matplotlib.pyplot as plt
+from matplotlib.patches import FancyArrowPatch
+from mpl_toolkits.mplot3d import proj3d
+
+try:
+    from tqdm import tqdm
+except ImportError:
+
+    def tqdm(iterable, *args, **kwargs):
+        return iterable
+
+
+class PBM3C2:
+    """
+    Correspondence-driven plane-based M3C2 for lower uncertainty in 3D topographic change quantification.
+
+    This class implements the PBM3C2 algorithm as described in Zahs et al. (2022).
+    """
+
+    def __init__(self, registration_error=0.0):
+        self.clf = RandomForestClassifier(n_estimators=100, random_state=42)
+        self.registration_error = registration_error
+        self.epoch0_segment_metrics = None
+        self.epoch1_segment_metrics = None
+        self.epoch0_segments = None
+        self.epoch1_segments = None
+        self.correspondences = None
+        self.epoch0_id_mapping = None
+        self.epoch1_id_mapping = None
+        self.epoch0_reverse_mapping = None
+        self.epoch1_reverse_mapping = None
+
+    @staticmethod
+    def preprocess_epochs(epoch0, epoch1, correspondences_file):
+        """
+        Assign globally unique segment IDs using independent sequential numbering.
+        Map correspondence file IDs to the new ID scheme.
+
+        Parameters
+        ----------
+        epoch0, epoch1 : Epoch
+            Input epochs with segment_id in additional_dimensions
+        correspondences_file : str
+            Path to CSV file with correspondence data
+
+        Returns
+        -------
+        tuple
+            (processed_epoch0, processed_epoch1, remapped_correspondences,
+            epoch0_id_mapping, epoch1_id_mapping,
+            epoch0_reverse_mapping, epoch1_reverse_mapping)
+        """
+        print("Assigning globally unique segment IDs...")
+
+        orig_ids0 = np.unique(epoch0.additional_dimensions["segment_id"])
+        orig_ids1 = np.unique(epoch1.additional_dimensions["segment_id"])
+
+        print(
+            f" Epoch 0: {len(orig_ids0)} unique segments (range: {orig_ids0.min()}-{orig_ids0.max()})"
+        )
+        print(
+            f" Epoch 1: {len(orig_ids1)} unique segments (range: {orig_ids1.min()}-{orig_ids1.max()})"
+        )
+
+        # Load correspondence file
+        try:
+            correspondences_arr = np.genfromtxt(
+                correspondences_file, delimiter=",", dtype=np.float64
+            )
+            if correspondences_arr.ndim == 1:
+                correspondences_arr = correspondences_arr.reshape(1, -1)
+        except Exception as e:
+            raise Py4DGeoError(
+                f"Failed to read correspondence file '{correspondences_file}': {e}"
+            )
+
+        if correspondences_arr.shape[1] < 2:
+            raise Py4DGeoError(
+                f"The correspondence file must contain at least two columns (got {correspondences_arr.shape[1]})."
+            )
+
+        if not np.issubdtype(correspondences_arr.dtype, np.number):
+            raise Py4DGeoError(
+                "The correspondence file appears to contain non-numeric data."
+            )
+
+        corr_ids0 = correspondences_arr[:, 0]
+        corr_ids1 = correspondences_arr[:, 1]
+
+        # Validate correspondence IDs
+        invalid_ids0 = ~np.isin(corr_ids0, orig_ids0)
+        invalid_ids1 = ~np.isin(corr_ids1, orig_ids1)
+
+        if invalid_ids0.any():
+            invalid_list = corr_ids0[invalid_ids0]
+            print(
+                f" Warning: {invalid_ids0.sum()} epoch0 IDs in correspondences don't exist in epoch0 segments"
+            )
+            print(f" First 10 invalid IDs: {invalid_list[:10]}")
+
+        if invalid_ids1.any():
+            invalid_list = corr_ids1[invalid_ids1]
+            print(
+                f" Warning: {invalid_ids1.sum()} epoch1 IDs in correspondences don't exist in epoch1 segments"
+            )
+            print(f" First 10 invalid IDs: {invalid_list[:10]}")
+
+        # Filter invalid correspondences
+        valid_mask = ~invalid_ids0 & ~invalid_ids1
+        if not valid_mask.all():
+            print(f" Filtering out {(~valid_mask).sum()} invalid correspondence pairs")
+            correspondences_arr = correspondences_arr[valid_mask]
+
+        if len(correspondences_arr) == 0:
+            raise Py4DGeoError(
+                "No valid correspondences remain after filtering. "
+                "Please verify that segment IDs in the correspondence file match those in the input epochs."
+            )
+
+        # Create new independent ID mappings
+        new_ids0 = np.arange(1, len(orig_ids0) + 1, dtype=np.int64)
+        epoch0_id_mapping = dict(zip(orig_ids0, new_ids0))
+        epoch0_reverse_mapping = dict(zip(new_ids0, orig_ids0))
+
+        start_id_epoch1 = len(orig_ids0) + 1
+        new_ids1 = np.arange(
+            start_id_epoch1, start_id_epoch1 + len(orig_ids1), dtype=np.int64
+        )
+        epoch1_id_mapping = dict(zip(orig_ids1, new_ids1))
+        epoch1_reverse_mapping = dict(zip(new_ids1, orig_ids1))
+
+        print(f" Assigned new IDs for Epoch 0: 1 to {len(orig_ids0)}")
+        print(
+            f" Assigned new IDs for Epoch 1: {start_id_epoch1} to {start_id_epoch1 + len(orig_ids1) - 1}"
+        )
+
+        # Apply new IDs to epoch0
+        old_seg_ids0 = epoch0.additional_dimensions["segment_id"]
+        new_seg_ids0 = np.array([epoch0_id_mapping[sid] for sid in old_seg_ids0])
+        new_add_dims0 = epoch0.additional_dimensions.copy()
+        new_add_dims0["segment_id"] = new_seg_ids0
+        new_epoch0 = Epoch(
+            cloud=epoch0.cloud.copy(), additional_dimensions=new_add_dims0
+        )
+
+        # Apply new IDs to epoch1
+        old_seg_ids1 = epoch1.additional_dimensions["segment_id"]
+        new_seg_ids1 = np.array([epoch1_id_mapping[sid] for sid in old_seg_ids1])
+        new_add_dims1 = epoch1.additional_dimensions.copy()
+        new_add_dims1["segment_id"] = new_seg_ids1
+        new_epoch1 = Epoch(
+            cloud=epoch1.cloud.copy(), additional_dimensions=new_add_dims1
+        )
+
+        # Remap correspondence IDs
+        remapped_corr = correspondences_arr.copy()
+        remapped_corr[:, 0] = np.array(
+            [epoch0_id_mapping[int(cid)] for cid in correspondences_arr[:, 0]]
+        )
+        remapped_corr[:, 1] = np.array(
+            [epoch1_id_mapping[int(cid)] for cid in correspondences_arr[:, 1]]
+        )
+
+        print(f" Remapped {len(remapped_corr)} correspondence pairs to new ID scheme")
+        print("Preprocessing complete.\n")
+
+        return (
+            new_epoch0,
+            new_epoch1,
+            remapped_corr,
+            epoch0_id_mapping,
+            epoch1_id_mapping,
+            epoch0_reverse_mapping,
+            epoch1_reverse_mapping,
+        )
+
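+    # Illustration of the ID scheme produced above: if epoch0 contains original
+    # segment IDs {3, 7, 12} and epoch1 contains {2, 7}, epoch0 is renumbered to
+    # {3: 1, 7: 2, 12: 3} and epoch1 to {2: 4, 7: 5}, so IDs are unique across
+    # both epochs. Correspondence rows are rewritten with these internal IDs and
+    # mapped back to the original IDs at the end of run().
+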
+    def _get_segments(self, epoch):
+        """Extract individual segments from an epoch."""
+        segment_id_array = epoch.additional_dimensions["segment_id"]
+        unique_segment_ids = np.unique(segment_id_array)
+
+        segments_dict = {}
+        for seg_id in unique_segment_ids:
+            indices = np.where(segment_id_array == seg_id)[0]
+            if len(indices) > 0:
+                segments_dict[seg_id] = {"points": epoch.cloud[indices].copy()}
+
+        return segments_dict
+
+    def _create_segment_metrics(self, segments):
+        """Calculate geometric metrics for each segment."""
+        metrics_list = []
+
+        for segment_id, data in tqdm(segments.items(), desc="Extracting Features"):
+            points = data["points"]
+            if points.shape[0] < 3:
+                continue
+
+            cog = np.mean(points, axis=0)
+            centered_points = points - cog
+            cov_matrix = np.cov(centered_points, rowvar=False)
+            eigenvalues, eigenvectors = np.linalg.eigh(cov_matrix)
+
+            sort_indices = np.argsort(eigenvalues)[::-1]
+            eigenvalues = eigenvalues[sort_indices]
+            eigenvectors = eigenvectors[:, sort_indices]
+
+            normal = eigenvectors[:, 2]
+            distances_to_plane = np.dot(centered_points, normal)
+            roughness = np.std(distances_to_plane)
+
+            e1, e2, e3 = eigenvalues
+            sum_eigenvalues = e1 + e2 + e3
+            if sum_eigenvalues == 0:
+                linearity = planarity = sphericity = 0
+            else:
+                linearity = (e1 - e2) / sum_eigenvalues
+                planarity = (e2 - e3) / sum_eigenvalues
+                sphericity = e3 / sum_eigenvalues
+
+            metrics_list.append(
+                {
+                    "segment_id": segment_id,
+                    "cog_x": cog[0],
+                    "cog_y": cog[1],
+                    "cog_z": cog[2],
+                    "normal_x": normal[0],
+                    "normal_y": normal[1],
+                    "normal_z": normal[2],
+                    "linearity": linearity,
+                    "planarity": planarity,
+                    "sphericity": sphericity,
+                    "roughness": roughness,
+                    "num_points": points.shape[0],
+                }
+            )
+
+        return pd.DataFrame(metrics_list).set_index("segment_id")
+
+    def _create_feature_array(self, df_t1, df_t2, correspondences):
+        """Create feature vectors for segment pairs."""
+        features = []
+
+        if isinstance(correspondences, np.ndarray):
+            pairs = correspondences
+        else:
+            pairs = correspondences.values
+
+        for row in pairs:
+            id1, id2 = int(row[0]), int(row[1])
+
+            if id1 not in df_t1.index or id2 not in df_t2.index:
+                continue
+
+            metrics1 = df_t1.loc[id1]
+            metrics2 = df_t2.loc[id2]
+
+            cog1 = metrics1[["cog_x", "cog_y", "cog_z"]].values
+            cog2 = metrics2[["cog_x", "cog_y", "cog_z"]].values
+            cog_dist = np.linalg.norm(cog1 - cog2)
+
+            normal1 = metrics1[["normal_x", "normal_y", "normal_z"]].values
+            normal2 = metrics2[["normal_x", "normal_y", "normal_z"]].values
+            dot_product = np.clip(np.dot(normal1, normal2), -1.0, 1.0)
+            normal_angle = np.arccos(dot_product)
+
+            roughness_diff = abs(metrics1["roughness"] - metrics2["roughness"])
+
+            features.append([cog_dist, normal_angle, roughness_diff])
+
+        return np.array(features)
+
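+    # Note: train() expects the remapped correspondence array to carry a third
+    # column with a binary label (1 = matching segment pair, 0 = non-matching),
+    # which preprocess_epochs() passes through unchanged from the CSV file.
+    # Each pair is described by three features: CoG distance, angle between the
+    # plane normals, and absolute roughness difference (see _create_feature_array).
+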
+    def train(self, correspondences):
+        """Train Random Forest classifier on labeled correspondences."""
+        positives = correspondences[correspondences[:, 2] == 1]
+        negatives = correspondences[correspondences[:, 2] == 0]
+
+        X_pos = self._create_feature_array(
+            self.epoch0_segment_metrics, self.epoch1_segment_metrics, positives
+        )
+        X_neg = self._create_feature_array(
+            self.epoch0_segment_metrics, self.epoch1_segment_metrics, negatives
+        )
+
+        if X_pos.shape[0] == 0 or X_neg.shape[0] == 0:
+            raise ValueError("Training data is missing positive or negative examples.")
+
+        X = np.vstack([X_pos, X_neg])
+        y = np.array([1] * len(X_pos) + [0] * len(X_neg))
+
+        self.clf.fit(X, y)
+
+    def apply(self, apply_ids, search_radius=3.0):
+        """Apply trained classifier to find correspondences."""
+        epoch1_cogs = self.epoch1_segment_metrics[["cog_x", "cog_y", "cog_z"]].values
+        kdtree = cKDTree(epoch1_cogs)
+        found_correspondences = []
+
+        for apply_id in tqdm(apply_ids, desc="Applying Classifier"):
+            if apply_id not in self.epoch0_segment_metrics.index:
+                continue
+
+            cog0 = self.epoch0_segment_metrics.loc[apply_id][
+                ["cog_x", "cog_y", "cog_z"]
+            ].values
+            indices = kdtree.query_ball_point(cog0, r=search_radius)
+
+            if len(indices) == 0:
+                continue
+
+            candidate_ids = self.epoch1_segment_metrics.index[indices]
+            apply_df = pd.DataFrame(
+                {"id1": [apply_id] * len(candidate_ids), "id2": candidate_ids}
+            )
+
+            X_apply = self._create_feature_array(
+                self.epoch0_segment_metrics, self.epoch1_segment_metrics, apply_df
+            )
+
+            if len(X_apply) == 0:
+                continue
+
+            probabilities = self.clf.predict_proba(X_apply)[:, 1]
+            best_match_idx = np.argmax(probabilities)
+            found_correspondences.append([apply_id, candidate_ids[best_match_idx]])
+
+        self.correspondences = pd.DataFrame(
+            found_correspondences, columns=["epoch0_segment_id", "epoch1_segment_id"]
+        )
+
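+    # The level of detection computed below follows the familiar M3C2 form,
+    # LoD = 1.96 * sqrt(sigma1^2 / n1 + sigma2^2 / n2) + registration_error,
+    # where sigma is each segment's plane roughness (std. of point-to-plane
+    # distances) and n is its number of points; the distance itself is the CoG
+    # difference projected onto the epoch0 plane normal.
+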
+    def _calculate_cog_distance(self, segment1_id, segment2_id):
+        """Calculate CoG distance and level of detection."""
+        metrics1 = self.epoch0_segment_metrics.loc[segment1_id]
+        metrics2 = self.epoch1_segment_metrics.loc[segment2_id]
+
+        cog1 = metrics1[["cog_x", "cog_y", "cog_z"]].values
+        cog2 = metrics2[["cog_x", "cog_y", "cog_z"]].values
+        normal1 = metrics1[["normal_x", "normal_y", "normal_z"]].values
+
+        dist = np.dot(cog2 - cog1, normal1)
+
+        sigma1_sq = metrics1["roughness"] ** 2
+        sigma2_sq = metrics2["roughness"] ** 2
+        n1 = metrics1["num_points"]
+        n2 = metrics2["num_points"]
+
+        if n1 == 0 or n2 == 0:
+            lod = np.nan
+        else:
+            lod = (
+                1.96 * np.sqrt(sigma1_sq / n1 + sigma2_sq / n2)
+                + self.registration_error
+            )
+
+        return dist, lod
+
+    def run(self, epoch0, epoch1, correspondences_file, apply_ids, search_radius=3.0):
+        """
+        Execute complete PBM3C2 workflow.
+
+        Parameters
+        ----------
+        epoch0, epoch1 : Epoch
+            Input point cloud epochs with segment_id
+        correspondences_file : str
+            Path to CSV with training correspondences
+        apply_ids : array-like
+            Segment IDs to find correspondences for (using original IDs)
+        search_radius : float
+            Spatial search radius in meters
+
+        Returns
+        -------
+        DataFrame
+            Correspondences with distances and uncertainties (using original IDs)
+        """
+        print("=" * 60)
+        print("PBM3C2 Processing Pipeline")
+        print("=" * 60)
+
+        print("\n[1/6] Preprocessing epochs and correspondences...")
+        (
+            epoch0_processed,
+            epoch1_processed,
+            correspondences_for_training,
+            self.epoch0_id_mapping,
+            self.epoch1_id_mapping,
+            self.epoch0_reverse_mapping,
+            self.epoch1_reverse_mapping,
+        ) = self.preprocess_epochs(epoch0, epoch1, correspondences_file)
+
+        print("[2/6] Loading and processing segments...")
+        self.epoch0_segments = self._get_segments(epoch0_processed)
+        self.epoch1_segments = self._get_segments(epoch1_processed)
+        print(
+            f" Loaded {len(self.epoch0_segments)} segments from epoch 0, {len(self.epoch1_segments)} from epoch 1"
+        )
+
+        print("\n[3/6] Extracting geometric features...")
+        self.epoch0_segment_metrics = self._create_segment_metrics(self.epoch0_segments)
+        self.epoch1_segment_metrics = self._create_segment_metrics(self.epoch1_segments)
+        print(
+            f" Computed metrics for {len(self.epoch0_segment_metrics)} + {len(self.epoch1_segment_metrics)} segments"
+        )
+
+        print("\n[4/6] Training Random Forest classifier...")
+        self.train(correspondences_for_training)
+        print(f" Classifier trained on {len(correspondences_for_training)} pairs")
+
+        print("\n[5/6] Finding correspondences...")
+        remapped_apply_ids = [
+            self.epoch0_id_mapping[orig_id]
+            for orig_id in apply_ids
+            if orig_id in self.epoch0_id_mapping
+        ]
+
+        if len(remapped_apply_ids) < len(apply_ids):
+            print(
+                f" Warning: {len(apply_ids) - len(remapped_apply_ids)} apply_ids not found in epoch0"
+            )
+
+        self.apply(apply_ids=remapped_apply_ids, search_radius=search_radius)
+
+        print("\n[6/6] Calculating M3C2 distances and uncertainties...")
+        if self.correspondences is None or self.correspondences.empty:
+            print(" Warning: No correspondences were found.")
+            return self.correspondences
+
+        distances, uncertainties = [], []
+        for _, row in self.correspondences.iterrows():
+            id1, id2 = row["epoch0_segment_id"], row["epoch1_segment_id"]
+            dist, lod = self._calculate_cog_distance(id1, id2)
+            distances.append(dist)
+            uncertainties.append(lod)
+
+        self.correspondences["distance"] = distances
+        self.correspondences["uncertainty"] = uncertainties
+
+        print("\n[Final] Mapping results back to original segment IDs...")
+        self.correspondences["epoch0_original_id"] = self.correspondences[
+            "epoch0_segment_id"
+        ].map(self.epoch0_reverse_mapping)
+        self.correspondences["epoch1_original_id"] = self.correspondences[
+            "epoch1_segment_id"
+        ].map(self.epoch1_reverse_mapping)
+
+        cols = [
+            "epoch0_original_id",
+            "epoch1_original_id",
+            "epoch0_segment_id",
+            "epoch1_segment_id",
+            "distance",
+            "uncertainty",
+        ]
+        self.correspondences = self.correspondences[cols]
+
+        print("=" * 60)
+        print(f"Processing complete! Found {len(self.correspondences)} matches")
+        print("=" * 60)
+
+        return self.correspondences
+
+    def visualize_correspondences(
+        self,
+        epoch0_segment_id=None,
+        use_original_ids=True,
+        show_all=False,
+        num_samples=10,
+        figsize=(12, 10),
+        elev=30,
+        azim=45,
+    ):
+        """
+        Visualize matched plane segments and their correspondences.
+
+        Parameters
+        ----------
+        epoch0_segment_id : int, optional
+            Specific segment ID to visualize (original or new ID based on use_original_ids)
+        use_original_ids : bool, optional
+            If True, interpret epoch0_segment_id as original ID (default: True)
+        show_all : bool, optional
+            Plot all correspondences (default: False)
+        num_samples : int, optional
+            Number of random samples if show_all=False (default: 10)
+        figsize : tuple, optional
+            Figure size in inches
+        elev : float, optional
+            Elevation angle for 3D view in degrees
+        azim : float, optional
+            Azimuth angle for 3D view in degrees
+
+        Returns
+        -------
+        tuple
+            (fig, ax) matplotlib figure and axis objects
+        """
+        if self.correspondences is None or self.correspondences.empty:
+            raise ValueError("No correspondences found. Run run() first.")
+
+        n_corr = len(self.correspondences)
+        zoom_in = False
+
+        if epoch0_segment_id is not None:
+            if (
+                use_original_ids
+                and "epoch0_original_id" in self.correspondences.columns
+            ):
+                corr_sample = self.correspondences[
+                    self.correspondences["epoch0_original_id"] == epoch0_segment_id
+                ]
+                title_text = (
+                    f"PBM3C2 Correspondence for Original Segment ID {epoch0_segment_id}"
+                )
+            else:
+                corr_sample = self.correspondences[
+                    self.correspondences["epoch0_segment_id"] == epoch0_segment_id
+                ]
+                title_text = f"PBM3C2 Correspondence for Segment ID {epoch0_segment_id}"
+
+            if corr_sample.empty:
+                raise ValueError(
+                    f"Segment ID {epoch0_segment_id} not found in correspondences."
+                )
+            zoom_in = True
+
+        elif show_all:
+            corr_sample = self.correspondences
+            title_text = f"PBM3C2 Correspondences (showing all {n_corr})"
+
+        else:
+            if n_corr > num_samples:
+                sample_indices = np.random.choice(n_corr, num_samples, replace=False)
+                corr_sample = self.correspondences.iloc[sample_indices]
+                title_text = (
+                    f"PBM3C2 Correspondences (showing {num_samples} of {n_corr})"
+                )
+            else:
+                corr_sample = self.correspondences
+                title_text = f"PBM3C2 Correspondences (showing all {n_corr})"
+
+        fig = plt.figure(figsize=figsize)
+        ax = fig.add_subplot(111, projection="3d")
+
+        class Arrow3D(FancyArrowPatch):
+            def __init__(self, xs, ys, zs, *args, **kwargs):
+                super().__init__((0, 0), (0, 0), *args, **kwargs)
+                self._verts3d = xs, ys, zs
+
+            def do_3d_projection(self, renderer=None):
+                xs3d, ys3d, zs3d = self._verts3d
+                xs, ys, zs = proj3d.proj_transform(xs3d, ys3d, zs3d, self.axes.M)
+                self.set_positions((xs[0], ys[0]), (xs[1], ys[1]))
+                return np.min(zs)
+
+        epoch0_plotted = False
+        epoch1_plotted = False
+
+        for idx, row in corr_sample.iterrows():
+            id0 = row["epoch0_segment_id"]
+            id1 = row["epoch1_segment_id"]
+
+            if id0 not in self.epoch0_segments or id1 not in self.epoch1_segments:
+                continue
+
+            points0 = self.epoch0_segments[id0]["points"]
+            points1 = self.epoch1_segments[id1]["points"]
+
+            label0 = "Epoch 0" if not epoch0_plotted else ""
+            ax.scatter(
+                points0[:, 0],
+                points0[:, 1],
+                points0[:, 2],
+                c="blue",
+                s=5,
+                alpha=0.3,
+                label=label0,
+            )
+            if label0:
+                epoch0_plotted = True
+
+            label1 = "Epoch 1" if not epoch1_plotted else ""
+            ax.scatter(
+                points1[:, 0],
+                points1[:, 1],
+                points1[:, 2],
+                c="red",
+                s=5,
+                alpha=0.3,
+                label=label1,
+            )
+            if label1:
+                epoch1_plotted = True
+
+            cog0 = self.epoch0_segment_metrics.loc[id0][
+                ["cog_x", "cog_y", "cog_z"]
+            ].values
+            cog1 = self.epoch1_segment_metrics.loc[id1][
+                ["cog_x", "cog_y", "cog_z"]
+            ].values
+
+            ax.scatter(
+                *cog0, c="darkblue", s=100, marker="o", edgecolors="black", linewidths=2
+            )
+            ax.scatter(
+                *cog1, c="darkred", s=100, marker="o", edgecolors="black", linewidths=2
+            )
+
+            arrow = Arrow3D(
+                [cog0[0], cog1[0]],
+                [cog0[1], cog1[1]],
+                [cog0[2], cog1[2]],
+                mutation_scale=20,
+                lw=2,
+                arrowstyle="-|>",
+                color="green",
+                alpha=0.8,
+            )
+            ax.add_artist(arrow)
+
+            if "distance" in row:
+                mid_point = (cog0 + cog1) / 2
+                ax.text(
+                    mid_point[0],
+                    mid_point[1],
+                    mid_point[2],
+                    f'{row["distance"]:.3f}m',
+                    fontsize=8,
+                    color="green",
+                    weight="bold",
+                )
+
+        ax.set_xlabel("X [m]", fontsize=12)
+        ax.set_ylabel("Y [m]", fontsize=12)
+        ax.set_zlabel("Z [m]", fontsize=12)
+        ax.set_title(title_text, fontsize=14, weight="bold")
+
+        if epoch0_plotted or epoch1_plotted:
+            ax.legend(loc="upper right", fontsize=10)
+
+        ax.view_init(elev=elev, azim=azim)
+
+        if not zoom_in:
+            ax.set_box_aspect([1, 1, 1])
+
+        plt.tight_layout()
+
+        return fig, ax
+
+    def get_original_ids(self, new_epoch0_id=None, new_epoch1_id=None):
+        """Retrieve original segment IDs from new internal IDs."""
+        result = []
+
+        if new_epoch0_id is not None:
+            orig_id = self.epoch0_reverse_mapping.get(new_epoch0_id, None)
+            if orig_id is None:
+                print(f"Warning: New epoch0 ID {new_epoch0_id} not found in mapping")
+            result.append(orig_id)
+
+        if new_epoch1_id is not None:
+            orig_id = self.epoch1_reverse_mapping.get(new_epoch1_id, None)
+            if orig_id is None:
+                print(f"Warning: New epoch1 ID {new_epoch1_id} not found in mapping")
+            result.append(orig_id)
+
+        return tuple(result) if len(result) > 1 else (result[0] if result else None)
+
+    def get_new_ids(self, orig_epoch0_id=None, orig_epoch1_id=None):
+        """Retrieve new internal segment IDs from original IDs."""
+        result = []
+
+        if orig_epoch0_id is not None:
+            new_id = self.epoch0_id_mapping.get(orig_epoch0_id, None)
+            if new_id is None:
+                print(
+                    f"Warning: Original epoch0 ID {orig_epoch0_id} not found in mapping"
+                )
+            result.append(new_id)
+
+        if orig_epoch1_id is not None:
+            new_id = self.epoch1_id_mapping.get(orig_epoch1_id, None)
+            if new_id is None:
+                print(
+                    f"Warning: Original epoch1 ID {orig_epoch1_id} not found in mapping"
+                )
+            result.append(new_id)
+
+        return tuple(result) if len(result) > 1 else (result[0] if result else None)
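
A minimal usage sketch of the class added above. This is illustrative only: the file names are placeholders, the read_from_xyz call with a segment_id column is an assumption about how the segmented epochs are prepared, and the numeric arguments are arbitrary.

import py4dgeo
from py4dgeo.pbm3c2 import PBM3C2

# Load two epochs whose fourth column holds a per-point segment ID
# (placeholder file names; adjust the column mapping to your data).
epoch0, epoch1 = py4dgeo.read_from_xyz(
    "epoch0_segmented.xyz",
    "epoch1_segmented.xyz",
    additional_dimensions={3: "segment_id"},
    delimiter=",",
)

# "correspondences.csv" (placeholder) holds rows of
# (epoch0 segment ID, epoch1 segment ID, label) used to train the classifier.
pbm3c2 = PBM3C2(registration_error=0.01)
results = pbm3c2.run(
    epoch0,
    epoch1,
    correspondences_file="correspondences.csv",
    apply_ids=[1, 2, 3],  # original epoch0 segment IDs to match
    search_radius=3.0,  # candidate search radius in metres
)

# results carries original and internal IDs, the signed distance along the
# epoch0 plane normal, and the associated level of detection.
print(results[["epoch0_original_id", "epoch1_original_id", "distance", "uncertainty"]])

# Optional 3D view of a few matched segments and their CoG displacement arrows.
fig, ax = pbm3c2.visualize_correspondences(num_samples=5)
fig.savefig("pbm3c2_correspondences.png", dpi=200)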