nettracer3d 0.8.9__py3-none-any.whl → 0.9.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nettracer3d/proximity.py CHANGED
@@ -469,68 +469,121 @@ def extract_pairwise_connections(connections):
     return output
 
 
-def average_nearest_neighbor_distances(point_centroids, root_set, compare_set, xy_scale=1.0, z_scale=1.0, num = 1):
+def average_nearest_neighbor_distances(point_centroids, root_set, compare_set, xy_scale=1.0, z_scale=1.0, num=1, do_borders=False):
     """
     Calculate the average distance between each point in root_set and its nearest neighbor in compare_set.
 
     Args:
-        point_centroids (dict): Dictionary mapping point IDs to [Z, Y, X] coordinates
-        root_set (set): Set of point IDs to find nearest neighbors for
-        compare_set (set): Set of point IDs to search for nearest neighbors in
+        point_centroids (dict): Dictionary mapping point IDs to [Z, Y, X] coordinates (when do_borders=False)
+                                OR dictionary mapping labels to border coordinates (when do_borders=True)
+        root_set (set or dict): Set of point IDs (when do_borders=False)
+                                OR dict {label: border_coords} (when do_borders=True)
+        compare_set (set or numpy.ndarray): Set of point IDs (when do_borders=False)
+                                OR 1D array of border coordinates (when do_borders=True)
         xy_scale (float): Scaling factor for X and Y coordinates
        z_scale (float): Scaling factor for Z coordinate
+        num (int): Number of nearest neighbors (ignored when do_borders=True, always uses 1)
+        do_borders (bool): If True, perform border-to-border distance calculation
 
     Returns:
-        float: Average distance to nearest neighbors
+        tuple: (average_distance, distances_dict)
     """
 
-    # Extract and scale coordinates for compare_set
-    compare_coords = []
-    compare_ids = list(compare_set)
-
-    for point_id in compare_ids:
-        z, y, x = point_centroids[point_id]
-        compare_coords.append([z * z_scale, y * xy_scale, x * xy_scale])
-
-    compare_coords = np.array(compare_coords)
-
-    # Build KDTree for efficient nearest neighbor search
-    tree = KDTree(compare_coords)
-
-    distances = {}
-    same_sets = root_set == compare_set
+    if do_borders:
+        # Border comparison mode
+        if not isinstance(compare_set, np.ndarray):
+            raise ValueError("When do_borders=True, compare_set must be a numpy array of coordinates")
+
+        # Vectorized scaling for compare coordinates
+        compare_coords_scaled = compare_set.astype(float)
+        compare_coords_scaled[:, 0] *= z_scale   # Z coordinates
+        compare_coords_scaled[:, 1:] *= xy_scale # Y and X coordinates
+
+        distances = {}
+
+        for label, border_coords in root_set.items():
+            if len(border_coords) == 0:
+                continue
+
+            # Vectorized scaling for border coordinates
+            border_coords_scaled = border_coords.astype(float)
+            border_coords_scaled[:, 0] *= z_scale   # Z coordinates
+            border_coords_scaled[:, 1:] *= xy_scale # Y and X coordinates
+
+            # Remove overlapping coordinates to avoid distance = 0
+            # Create a set of tuples for fast membership testing
+            border_coords_set = set(map(tuple, border_coords_scaled))
+
+            # Filter out overlapping coordinates from compare set
+            non_overlapping_mask = np.array([
+                tuple(coord) not in border_coords_set
+                for coord in compare_coords_scaled
+            ])
+
+            if not np.any(non_overlapping_mask):
+                # All compare coordinates overlap - skip this object or set to NaN
+                distances[label] = np.nan
+                continue
+
+            filtered_compare_coords = compare_coords_scaled[non_overlapping_mask]
+
+            # Build KDTree with filtered coordinates
+            tree = KDTree(filtered_compare_coords)
+
+            # Vectorized nearest neighbor search for all border points at once
+            distances_to_all, _ = tree.query(border_coords_scaled, k=1)
+
+            # Find minimum distance for this object
+            distances[label] = np.min(distances_to_all)
+
+        # Calculate average excluding NaN values
+        valid_distances = [d for d in distances.values() if not np.isnan(d)]
+        avg = np.mean(valid_distances) if valid_distances else np.nan
+        return avg, distances
 
-    for root_id in root_set:
-        # Get scaled coordinates for root point
-        z, y, x = point_centroids[root_id]
-        root_coord = np.array([z * z_scale, y * xy_scale, x * xy_scale])
+    else:
+        # Original centroid comparison mode (unchanged)
+        # Extract coordinates for compare_set
+        compare_coords = np.array([point_centroids[point_id] for point_id in compare_set])
 
+        # Vectorized scaling for compare coordinates
+        compare_coords_scaled = compare_coords.astype(float)
+        compare_coords_scaled[:, 0] *= z_scale   # Z coordinates
+        compare_coords_scaled[:, 1:] *= xy_scale # Y and X coordinates
+
+        # Build KDTree for efficient nearest neighbor search
+        tree = KDTree(compare_coords_scaled)
+
+        distances = {}
+        same_sets = root_set == compare_set
+
+        # Extract and scale root coordinates all at once
+        root_coords = np.array([point_centroids[root_id] for root_id in root_set])
+        root_coords_scaled = root_coords.astype(float)
+        root_coords_scaled[:, 0] *= z_scale   # Z coordinates
+        root_coords_scaled[:, 1:] *= xy_scale # Y and X coordinates
+
+        # Vectorized nearest neighbor search for all root points
         if same_sets:
-            # When sets are the same, find 2 nearest neighbors and take the second one
-            # (first one would be the point itself)
-            distances_to_all, indices = tree.query(root_coord, k= (num + 1))
-
-            temp_dist = 0
-            for i in range(1, len(distances_to_all)):
-                temp_dist += distances_to_all[i]
-
-            distances[root_id] = temp_dist/(len(distances_to_all) - 1)
-
+            distances_to_all, indices = tree.query(root_coords_scaled, k=(num + 1))
+            # Remove self-matches (first column) and average the rest
+            if num == 1:
+                distances_array = distances_to_all[:, 1]  # Just take second nearest
+            else:
+                distances_array = np.mean(distances_to_all[:, 1:], axis=1)
         else:
-            # Different sets, find nearest neighbors
-            distances_to_all, _ = tree.query(root_coord, k=num)
-            temp_dist = 0
-            for val in distances_to_all:
-                temp_dist += val
-
-            distances[root_id] = temp_dist/(len(distances_to_all))
-
-    avg = np.mean(list(distances.values())) if list(distances.values()) else 0.0
-
-
-    # Return average distance
-    return avg, distances
-
+            distances_to_all, _ = tree.query(root_coords_scaled, k=num)
+            if num == 1:
+                distances_array = distances_to_all.flatten()
+            else:
+                distances_array = np.mean(distances_to_all, axis=1)
+
+        # Map back to root_ids
+        for i, root_id in enumerate(root_set):
+            distances[root_id] = distances_array[i]
+
+        avg = np.mean(distances_array) if len(distances_array) > 0 else 0.0
+        return avg, distances
 
 
 #voronois:
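
For context, a minimal usage sketch of the updated function in both modes, inferred only from the new signature and docstring shown in the diff. The import path (from nettracer3d import proximity), the toy coordinate values, and the (N, 3) shape of the border-coordinate arrays are assumptions for illustration, not taken from NetTracer3D documentation.

import numpy as np
from nettracer3d import proximity  # assumed import path (file shown above: nettracer3d/proximity.py)

# Centroid mode (pre-existing behavior): point IDs -> [Z, Y, X] centroids.
centroids = {1: [0, 10, 10], 2: [0, 40, 12], 3: [2, 11, 30]}
avg, per_point = proximity.average_nearest_neighbor_distances(
    centroids, root_set={1, 2}, compare_set={3},
    xy_scale=0.5, z_scale=2.0, num=1)

# Border mode (added in this 0.8.9 -> 0.9.1 diff): root_set maps labels to
# arrays of border voxel coordinates, compare_set is a single coordinate array;
# the per-label result is the minimum border-to-border distance (NaN if every
# compare coordinate overlaps that label's border).
borders = {7: np.array([[0, 5, 5], [0, 5, 6]]), 8: np.array([[1, 20, 20]])}
target_border = np.array([[0, 5, 8], [1, 22, 20], [3, 0, 0]])
avg_b, per_label = proximity.average_nearest_neighbor_distances(
    borders, root_set=borders, compare_set=target_border,
    xy_scale=0.5, z_scale=2.0, do_borders=True)

Per the new Returns section, both calls yield a tuple of the overall average plus a per-ID (or per-label) distance dictionary, whereas the 0.8.9 docstring described only a single float.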