nettracer3d 0.8.9__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This is a potentially problematic release.


This version of nettracer3d might be problematic; see the release notes on the package registry for more details.

@@ -16,14 +16,15 @@ class CellposeGUILauncher:
16
16
  """
17
17
  self.parent_widget = parent_widget
18
18
  self.cellpose_process = None
19
-
20
- def launch_cellpose_gui(self, image_path=None, working_directory=None):
19
+
20
+ def launch_cellpose_gui(self, image_path=None, working_directory=None, use_3d=False):
21
21
  """
22
22
  Launch cellpose GUI in a separate thread.
23
23
 
24
24
  Args:
25
25
  image_path (str, optional): Path to image file to load automatically
26
26
  working_directory (str, optional): Directory to start cellpose in
27
+ use_3d (bool, optional): Whether to launch cellpose 3D version (default: False)
27
28
 
28
29
  Returns:
29
30
  bool: True if launch was initiated successfully
@@ -34,6 +35,10 @@ class CellposeGUILauncher:
34
35
  # Build command
35
36
  cmd = [sys.executable, "-m", "cellpose"]
36
37
 
38
+ # Add 3D flag if requested
39
+ if use_3d:
40
+ cmd.append("--Zstack")
41
+
37
42
  # Add image path if provided
38
43
  if image_path and Path(image_path).exists():
39
44
  cmd.extend(["--image_path", str(image_path)])
@@ -55,29 +60,35 @@ class CellposeGUILauncher:
55
60
  except Exception as e:
56
61
  if self.parent_widget:
57
62
  # Show error in main thread
58
- self.show_error(f"Failed to launch cellpose GUI: {str(e)}")
63
+ version_str = "3D " if use_3d else ""
64
+ self.show_error(f"Failed to launch cellpose {version_str}GUI: {str(e)}")
59
65
  else:
60
- print(f"Failed to launch cellpose GUI: {str(e)}")
66
+ version_str = "3D " if use_3d else ""
67
+ print(f"Failed to launch cellpose {version_str}GUI: {str(e)}")
61
68
 
62
69
  try:
63
70
  # Start cellpose in separate thread
64
71
  thread = threading.Thread(target=run_cellpose, daemon=True)
65
72
  thread.start()
66
73
 
67
- if self.parent_widget:
68
- self.show_info("Cellpose GUI launched!")
69
- else:
70
- print("Cellpose GUI launched!")
74
+ #if self.parent_widget:
75
+ #version_str = "3D " if use_3d else ""
76
+ #self.show_info(f"Cellpose {version_str}GUI launched!")
77
+ #else:
78
+ #version_str = "3D " if use_3d else ""
79
+ #print(f"Cellpose {version_str}GUI launched!")
71
80
 
72
81
  return True
73
82
 
74
83
  except Exception as e:
75
84
  if self.parent_widget:
76
- self.show_error(f"Failed to start cellpose thread: {str(e)}")
85
+ version_str = "3D " if use_3d else ""
86
+ self.show_error(f"Failed to start cellpose {version_str}thread: {str(e)}")
77
87
  else:
78
- print(f"Failed to start cellpose thread: {str(e)}")
88
+ version_str = "3D " if use_3d else ""
89
+ print(f"Failed to start cellpose {version_str}thread: {str(e)}")
79
90
  return False
80
-
91
+
81
92
  def launch_with_directory(self, directory_path):
82
93
  """
83
94
  Launch cellpose GUI with a specific directory.
nettracer3d/modularity.py CHANGED
@@ -103,7 +103,7 @@ def read_excel_to_lists(file_path, sheet_name=0):
103
103
 
104
104
 
105
105
 
106
- def show_communities_flex(G, master_list, normalized_weights, geo_info = None, geometric=False, directory=None, weighted=True, partition=None, style=0):
106
+ def show_communities_flex(G, master_list, normalized_weights, geo_info=None, geometric=False, directory=None, weighted=True, partition=None, style=0):
107
107
 
108
108
  if normalized_weights is None:
109
109
  G, edge_weights = network_analysis.weighted_network(master_list)
@@ -137,10 +137,25 @@ def show_communities_flex(G, master_list, normalized_weights, geo_info = None, g
137
137
 
138
138
  # Create a mapping of community IDs to sequential indices
139
139
  unique_communities = sorted(set(partition.values()))
140
- community_to_index = {comm: idx for idx, comm in enumerate(unique_communities)}
141
-
142
- # Prepare colors using the number of unique communities
143
- colors = [plt.cm.jet(i / len(unique_communities)) for i in range(len(unique_communities))]
140
+
141
+ # Use the same color generation method as the overlay system
142
+ # Get community sizes for sorting (largest first)
143
+ from collections import Counter
144
+ community_sizes = Counter(partition.values())
145
+ sorted_communities = sorted(unique_communities, key=lambda x: community_sizes[x], reverse=True)
146
+
147
+ from . import community_extractor
148
+
149
+ # Generate distinct colors using the same method as assign_community_colors
150
+ colors_rgb = community_extractor.generate_distinct_colors(len(unique_communities))
151
+
152
+ # Create community to color mapping (same order as the overlay system)
153
+ community_to_color = {comm: colors_rgb[i] for i, comm in enumerate(sorted_communities)}
154
+
155
+ # Convert RGB tuples to matplotlib format (0-1 range)
156
+ colors_matplotlib = {}
157
+ for comm, rgb in community_to_color.items():
158
+ colors_matplotlib[comm] = tuple(c/255.0 for c in rgb)
144
159
 
145
160
  if weighted:
146
161
  G = nx.Graph()
@@ -156,7 +171,7 @@ def show_communities_flex(G, master_list, normalized_weights, geo_info = None, g
156
171
  for community_id, nodes in communities.items():
157
172
  node_sizes_list = [z_pos[node] for node in nodes]
158
173
  nx.draw_networkx_nodes(G, pos, nodelist=nodes,
159
- node_color=[colors[community_to_index[community_id]]],
174
+ node_color=[colors_matplotlib[community_id]],
160
175
  node_size=node_sizes_list, alpha=0.8)
161
176
 
162
177
  # Draw edges with normalized weights
@@ -172,7 +187,7 @@ def show_communities_flex(G, master_list, normalized_weights, geo_info = None, g
172
187
  # Draw the nodes, coloring them according to their community
173
188
  for community_id, nodes in communities.items():
174
189
  nx.draw_networkx_nodes(G, pos, nodelist=nodes,
175
- node_color=[colors[community_to_index[community_id]]],
190
+ node_color=[colors_matplotlib[community_id]],
176
191
  node_size=100, alpha=0.8)
177
192
 
178
193
  # Draw edges with normalized weights
@@ -183,8 +198,8 @@ def show_communities_flex(G, master_list, normalized_weights, geo_info = None, g
183
198
  nx.draw_networkx_labels(G, pos)
184
199
 
185
200
  else:
186
- # Create node color list based on partition and mapping
187
- node_colors = [colors[community_to_index[partition[node]]] for node in G.nodes()]
201
+ # Create node color list based on partition and the same color mapping
202
+ node_colors = [colors_matplotlib[partition[node]] for node in G.nodes()]
188
203
 
189
204
  if geometric:
190
205
  pos, z_pos = simple_network.geometric_positions(geo_info[0], geo_info[1])
@@ -644,25 +644,41 @@ def create_community_heatmap(community_intensity, node_community, node_centroids
644
644
 
645
645
  # Create colormap function (RdBu_r - red for high, blue for low, yellow/white for middle)
646
646
  def intensity_to_rgb(intensity, min_val, max_val):
647
- """Convert intensity value to RGB using RdBu_r colormap logic"""
647
+ """Convert intensity value to RGB using RdBu_r colormap logic, centered at 0"""
648
+
649
+ # Handle edge case where all values are the same
648
650
  if max_val == min_val:
649
- # All same value, use neutral color
651
+ if intensity == 0:
652
+ return np.array([255, 255, 255], dtype=np.uint8) # White for 0
653
+ elif intensity > 0:
654
+ return np.array([255, 200, 200], dtype=np.uint8) # Light red for positive
655
+ else:
656
+ return np.array([200, 200, 255], dtype=np.uint8) # Light blue for negative
657
+
658
+ # Find the maximum absolute value for symmetric scaling around 0
659
+ max_abs = max(abs(min_val), abs(max_val))
660
+
661
+ # If max_abs is 0, everything is 0, so return white
662
+ if max_abs == 0:
650
663
  return np.array([255, 255, 255], dtype=np.uint8) # White
651
664
 
652
- # Normalize to -1 to 1 range (like RdBu_r colormap)
653
- normalized = 2 * (intensity - min_val) / (max_val - min_val) - 1
665
+ # Normalize intensity to -1 to 1 range, centered at 0
666
+ normalized = intensity / max_abs
654
667
  normalized = np.clip(normalized, -1, 1)
655
668
 
656
669
  if normalized > 0:
657
- # Positive values: white to red
670
+ # Positive values: white to red (intensity 0 = white, max positive = red)
658
671
  r = 255
659
672
  g = int(255 * (1 - normalized))
660
673
  b = int(255 * (1 - normalized))
661
- else:
662
- # Negative values: white to blue
674
+ elif normalized < 0:
675
+ # Negative values: white to blue (intensity 0 = white, max negative = blue)
663
676
  r = int(255 * (1 + normalized))
664
677
  g = int(255 * (1 + normalized))
665
678
  b = 255
679
+ else:
680
+ # Exactly 0: white
681
+ r, g, b = 255, 255, 255
666
682
 
667
683
  return np.array([r, g, b], dtype=np.uint8)
668
684
 
@@ -868,25 +884,41 @@ def create_node_heatmap(node_intensity, node_centroids, shape=None, is_3d=True,
868
884
 
869
885
  # Create colormap function (RdBu_r - red for high, blue for low, yellow/white for middle)
870
886
  def intensity_to_rgb(intensity, min_val, max_val):
871
- """Convert intensity value to RGB using RdBu_r colormap logic"""
887
+ """Convert intensity value to RGB using RdBu_r colormap logic, centered at 0"""
888
+
889
+ # Handle edge case where all values are the same
872
890
  if max_val == min_val:
873
- # All same value, use neutral color
891
+ if intensity == 0:
892
+ return np.array([255, 255, 255], dtype=np.uint8) # White for 0
893
+ elif intensity > 0:
894
+ return np.array([255, 200, 200], dtype=np.uint8) # Light red for positive
895
+ else:
896
+ return np.array([200, 200, 255], dtype=np.uint8) # Light blue for negative
897
+
898
+ # Find the maximum absolute value for symmetric scaling around 0
899
+ max_abs = max(abs(min_val), abs(max_val))
900
+
901
+ # If max_abs is 0, everything is 0, so return white
902
+ if max_abs == 0:
874
903
  return np.array([255, 255, 255], dtype=np.uint8) # White
875
904
 
876
- # Normalize to -1 to 1 range (like RdBu_r colormap)
877
- normalized = 2 * (intensity - min_val) / (max_val - min_val) - 1
905
+ # Normalize intensity to -1 to 1 range, centered at 0
906
+ normalized = intensity / max_abs
878
907
  normalized = np.clip(normalized, -1, 1)
879
908
 
880
909
  if normalized > 0:
881
- # Positive values: white to red
910
+ # Positive values: white to red (intensity 0 = white, max positive = red)
882
911
  r = 255
883
912
  g = int(255 * (1 - normalized))
884
913
  b = int(255 * (1 - normalized))
885
- else:
886
- # Negative values: white to blue
914
+ elif normalized < 0:
915
+ # Negative values: white to blue (intensity 0 = white, max negative = blue)
887
916
  r = int(255 * (1 + normalized))
888
917
  g = int(255 * (1 + normalized))
889
918
  b = 255
919
+ else:
920
+ # Exactly 0: white
921
+ r, g, b = 255, 255, 255
890
922
 
891
923
  return np.array([r, g, b], dtype=np.uint8)
892
924
 
nettracer3d/nettracer.py CHANGED
@@ -500,7 +500,7 @@ def _upsample_3d_array(data, factor, original_shape):
500
500
  else:
501
501
  trimmed_rows = trimmed_planes[:, sub_before[1]:-sub_after[1], :]
502
502
 
503
- # Remove columns from the beginning and end
503
+ # Remove columns from the beginning and end
504
504
  if sub_dims[2] == 0:
505
505
  trimmed_array = trimmed_rows
506
506
  else:
@@ -508,6 +508,101 @@ def _upsample_3d_array(data, factor, original_shape):
508
508
 
509
509
  return trimmed_array
510
510
 
511
+
512
+ def remove_branches_new(skeleton, length):
513
+ """Used to compensate for overly-branched skeletons resulting from the scipy 3d skeletonization algorithm"""
514
+ def find_coordinate_difference(arr):
515
+ try:
516
+ arr[1,1,1] = 0
517
+ # Find the indices of non-zero elements
518
+ indices = np.array(np.nonzero(arr)).T
519
+
520
+ # Calculate the difference
521
+ diff = np.array([1,1,1]) - indices[0]
522
+
523
+ return diff
524
+ except:
525
+ return None
526
+
527
+ skeleton = np.pad(skeleton, pad_width=1, mode='constant', constant_values=0) #Add black planes over the 3d space to avoid index errors
528
+ image_copy = np.copy(skeleton)
529
+
530
+ # Find all endpoints ONCE at the beginning
531
+ nonzero_coords = np.transpose(np.nonzero(image_copy))
532
+ endpoints = []
533
+ nubs = []
534
+
535
+ for x, y, z in nonzero_coords:
536
+ mini = image_copy[x-1:x+2, y-1:y+2, z-1:z+2]
537
+ nearby_sum = np.sum(mini)
538
+ threshold = 2 * image_copy[x, y, z]
539
+
540
+ if nearby_sum <= threshold:
541
+ endpoints.append((x, y, z))
542
+
543
+ x, y, z = endpoints[0]
544
+ original_val = image_copy[x, y, z]
545
+
546
+ # Process each endpoint individually for nub assessment
547
+ for start_x, start_y, start_z in endpoints:
548
+
549
+ # Trace the branch from this endpoint, removing points as we go
550
+ branch_coords = []
551
+ current_coord = (start_x, start_y, start_z)
552
+ nub_reached = False
553
+
554
+ for step in range(length):
555
+ x, y, z = current_coord
556
+
557
+ # Store original value and coordinates
558
+ branch_coords.append((x, y, z))
559
+
560
+ # Remove this point temporarily
561
+ image_copy[x, y, z] = 0
562
+
563
+ # If we've reached the maximum length without hitting a nub, break
564
+ if step == length - 1:
565
+ break
566
+
567
+ # Find next coordinate in the branch
568
+ mini = image_copy[x-1:x+2, y-1:y+2, z-1:z+2]
569
+ dif = find_coordinate_difference(mini.copy())
570
+ if dif is None:
571
+ break
572
+
573
+ next_coord = (x - dif[0], y - dif[1], z - dif[2])
574
+
575
+ # Check if next coordinate is valid and exists
576
+ nx, ny, nz = next_coord
577
+
578
+ # Check if next point is a nub (has more neighbors than expected)
579
+ next_mini = image_copy[nx-1:nx+2, ny-1:ny+2, nz-1:nz+2]
580
+ next_nearby_sum = np.sum(next_mini)
581
+ next_threshold = 2 * image_copy[nx, ny, nz]
582
+
583
+ if next_nearby_sum > next_threshold:
584
+ nub_reached = True
585
+ nubs.append(next_coord)
586
+ nubs.append(current_coord) # Note, if we don't add the current coord here (and restore it below), the behavior of this method can be changed to trim branches beneath previous branches, which could be neat but its somewhat unpredictable so I opted out of it.
587
+ image_copy[x, y, z] = original_val
588
+ #image_copy[nx, ny, nz] = 0
589
+ break
590
+
591
+ current_coord = next_coord
592
+
593
+ # If no nub was reached, restore all the points we removed
594
+ if not nub_reached:
595
+ for i, (bx, by, bz) in enumerate(branch_coords):
596
+ image_copy[bx, by, bz] = original_val
597
+ # If nub was reached, points stay removed (branch is eliminated)
598
+
599
+ for item in nubs: #The nubs are endpoints of length = 1. They appear a bit different in the array so we just note when one is created and remove them all at the end in a batch.
600
+ image_copy[item[0], item[1], item[2]] = 0 # Removing the nub itself leaves a hole in the skeleton but for branchpoint detection that doesn't matter, which is why it behaves this way. To fill the hole, one option is to dilate once then erode/skeletonize again, but we want to avoid making anything that looks like local branching so I didn't bother.
601
+
602
+ # Remove padding and return
603
+ image_copy = (image_copy[1:-1, 1:-1, 1:-1]).astype(np.uint8)
604
+ return image_copy
605
+
511
606
  def remove_branches(skeleton, length):
512
607
  """Used to compensate for overly-branched skeletons resulting from the scipy 3d skeletonization algorithm"""
513
608
 
@@ -532,6 +627,7 @@ def remove_branches(skeleton, length):
532
627
  x, y, z = nonzero_coords[0]
533
628
  threshold = 2 * skeleton[x, y, z]
534
629
  nubs = []
630
+
535
631
 
536
632
  for b in range(length):
537
633
 
@@ -628,6 +724,8 @@ def break_and_label_skeleton(skeleton, peaks = 1, branch_removal = 0, comp_dil =
628
724
  else:
629
725
  broken_skele = None
630
726
 
727
+ #old_skeleton = copy.deepcopy(skeleton) # The skeleton might get modified in label_vertices so we can make a preserved copy of it to use later
728
+
631
729
  if nodes is None:
632
730
 
633
731
  verts = label_vertices(skeleton, peaks = peaks, branch_removal = branch_removal, comp_dil = comp_dil, max_vol = max_vol, return_skele = return_skele)
@@ -637,6 +735,8 @@ def break_and_label_skeleton(skeleton, peaks = 1, branch_removal = 0, comp_dil =
637
735
 
638
736
  verts = invert_array(verts)
639
737
 
738
+ #skeleton = old_skeleton
739
+
640
740
  image_copy = skeleton * verts
641
741
 
642
742
 
@@ -1031,10 +1131,93 @@ def remove_trunk(edges, num_iterations=1):
1031
1131
 
1032
1132
  return edges
1033
1133
 
1034
- def hash_inners(search_region, inner_edges, GPU = True):
1134
+ def get_all_label_coords(labeled_array, background=0):
1135
+ """
1136
+ Get coordinates for all labels using single pass method.
1137
+
1138
+ Parameters:
1139
+ -----------
1140
+ labeled_array : numpy.ndarray
1141
+ Labeled array with integer labels
1142
+ background : int, optional
1143
+ Background label to exclude (default: 0)
1144
+
1145
+ Returns:
1146
+ --------
1147
+ dict : {label: coordinates_array}
1148
+ Dictionary mapping each label to its coordinate array
1149
+ """
1150
+ coords_dict = {}
1151
+
1152
+ # Get all non-background coordinates at once
1153
+ all_coords = np.argwhere(labeled_array != background)
1154
+
1155
+ if len(all_coords) == 0:
1156
+ return coords_dict
1157
+
1158
+ # Get the label values at those coordinates
1159
+ labels_at_coords = labeled_array[tuple(all_coords.T)]
1160
+
1161
+ # Group by label
1162
+ unique_labels = np.unique(labels_at_coords)
1163
+ for label in unique_labels:
1164
+ mask = labels_at_coords == label
1165
+ coords_dict[label] = all_coords[mask]
1166
+
1167
+ return coords_dict
1168
+
1169
+ def approx_boundaries(array, iden_set = None, node_identities = None, keep_labels = False):
1170
+
1171
+ """Hollows out an array, can do it for only a set number of identities. Returns coords as dict if labeled or as 1d numpy array if binary is desired"""
1172
+
1173
+ if node_identities is not None:
1174
+
1175
+ nodes = []
1176
+
1177
+ for node in node_identities:
1178
+
1179
+ if node_identities[node] in iden_set: #Filter out only idens we need
1180
+ nodes.append(node)
1181
+
1182
+ mask = np.isin(array, nodes)
1183
+
1184
+ if keep_labels:
1185
+
1186
+ array = array * mask
1187
+ else:
1188
+ array = mask
1189
+ del mask
1190
+
1191
+ from skimage.segmentation import find_boundaries
1192
+
1193
+ borders = find_boundaries(array, mode='thick')
1194
+ array = array * borders
1195
+ del borders
1196
+ if not keep_labels:
1197
+ return np.argwhere(array != 0)
1198
+ else:
1199
+ return get_all_label_coords(array)
1200
+
1201
+
1202
+
1203
+ def hash_inners(search_region, inner_edges, GPU = False):
1035
1204
  """Internal method used to help sort out inner edge connections. The inner edges of the array will not differentiate between what nodes they contact if those nodes themselves directly touch each other.
1036
1205
  This method allows these elements to be efficiently seperated from each other"""
1037
1206
 
1207
+ from skimage.segmentation import find_boundaries
1208
+
1209
+ borders = find_boundaries(search_region, mode='thick')
1210
+
1211
+ inner_edges = inner_edges * borders #And as a result, we can mask out only 'inner edges' that themselves exist within borders
1212
+
1213
+ inner_edges = dilate_3D_old(inner_edges, 3, 3, 3) #Not sure if dilating is necessary. Want to ensure that the inner edge pieces still overlap with the proper nodes after the masking.
1214
+
1215
+ return inner_edges
1216
+
1217
+ def hash_inners_old(search_region, inner_edges, GPU = True):
1218
+ """Internal method used to help sort out inner edge connections. The inner edges of the array will not differentiate between what nodes they contact if those nodes themselves directly touch each other.
1219
+ This method allows these elements to be efficiently seperated from each other. Originally this was implemented using the gaussian blur because i didn't yet realize skimage could do the same more efficiently."""
1220
+
1038
1221
  print("Performing gaussian blur to hash inner edges.")
1039
1222
 
1040
1223
  blurred_search = smart_dilate.gaussian(search_region, GPU = GPU)
@@ -2170,6 +2353,9 @@ def label_vertices(array, peaks = 0, branch_removal = 0, comp_dil = 0, max_vol =
2170
2353
 
2171
2354
  array = skeletonize(array)
2172
2355
 
2356
+ if return_skele:
2357
+ old_skeleton = copy.deepcopy(array) # The skeleton might get modified in label_vertices so we can make a preserved copy of it to use later
2358
+
2173
2359
  if branch_removal > 0:
2174
2360
  array = remove_branches(array, branch_removal)
2175
2361
 
@@ -2235,7 +2421,7 @@ def label_vertices(array, peaks = 0, branch_removal = 0, comp_dil = 0, max_vol =
2235
2421
 
2236
2422
  if return_skele:
2237
2423
 
2238
- return labeled_image, (array[1:-1, 1:-1, 1:-1]).astype(np.uint8)
2424
+ return labeled_image, old_skeleton
2239
2425
 
2240
2426
  else:
2241
2427
 
@@ -5668,75 +5854,128 @@ class Network_3D:
5668
5854
  pass
5669
5855
 
5670
5856
 
5671
- def nearest_neighbors_avg(self, root, targ, xy_scale = 1, z_scale = 1, num = 1, heatmap = False, threed = True, numpy = False, quant = False):
5857
+ def nearest_neighbors_avg(self, root, targ, xy_scale = 1, z_scale = 1, num = 1, heatmap = False, threed = True, numpy = False, quant = False, centroids = True):
5672
5858
 
5673
- def get_theoretical_nearest_neighbor_distance(compare_set, num_neighbors, volume, is_2d=False):
5674
- """
5675
- Calculate theoretical expected distance to k-th nearest neighbor
5676
- assuming random uniform distribution in 2D or 3D space.
5677
- """
5678
- import math
5859
+
5860
+ def distribute_points_uniformly(n, shape, z_scale, xy_scale, is_2d=False):
5861
+ if n <= 1:
5862
+ return 0
5863
+
5864
+ # Calculate total number of positions
5865
+ total_positions = np.prod(shape)
5866
+
5867
+ # Calculate the flat index spacing
5868
+ flat_spacing = total_positions / n
5869
+
5870
+ # Get the first two flat indices
5871
+ idx1 = 0
5872
+ idx2 = int(flat_spacing)
5873
+
5874
+ # Convert to multi-dimensional coordinates theoretically
5875
+ coord1 = np.unravel_index(idx1, shape)
5876
+ coord2 = np.unravel_index(idx2, shape)
5679
5877
 
5680
- if len(compare_set) == 0 or volume <= 0:
5681
- raise ValueError("Invalid input: empty set or non-positive volume")
5878
+ # Apply scaling
5879
+ if len(shape) == 3:
5880
+ p1 = np.array([coord1[0] * z_scale, coord1[1] * xy_scale, coord1[2] * xy_scale])
5881
+ p2 = np.array([coord2[0] * z_scale, coord2[1] * xy_scale, coord2[2] * xy_scale])
5882
+ elif len(shape) == 2:
5883
+ p1 = np.array([coord1[0] * xy_scale, coord1[1] * xy_scale])
5884
+ p2 = np.array([coord2[0] * xy_scale, coord2[1] * xy_scale])
5682
5885
 
5683
- density = len(compare_set) / volume
5684
- k = num_neighbors
5886
+ # Calculate neighbor distance
5887
+ neighbor_distance = np.linalg.norm(p2 - p1)
5685
5888
 
5889
+ # Apply the dimensional factor
5686
5890
  if is_2d:
5687
- # Expected distance to k-th nearest neighbor in 2D
5688
- # μ1' = Γ(k + 1/2) / (Γ(k) × √(m × π))
5689
- expected_distance = math.gamma(k + 0.5) / (math.gamma(k) * math.sqrt(density * math.pi))
5891
+ neighbor_distance = neighbor_distance * 0.38
5690
5892
  else:
5691
- # Expected distance to k-th nearest neighbor in 3D
5692
- # μ1' = Γ(k + 1/3) / (Γ(k) × (m × Φ)^(1/3))
5693
- # where Φ = π^(3/2) / Γ(3/2 + 1) = π^(3/2) / Γ(5/2) = 4π/3
5694
- phi_3d = 4 * math.pi / 3 # Volume of unit sphere in 3D
5695
- expected_distance = math.gamma(k + 1/3) / (math.gamma(k) * (density * phi_3d)**(1/3))
5893
+ neighbor_distance = neighbor_distance * 0.45
5696
5894
 
5697
- return expected_distance
5895
+ return neighbor_distance
5896
+
5897
+ do_borders = not centroids
5698
5898
 
5699
- root_set = []
5899
+ if centroids:
5900
+ root_set = []
5700
5901
 
5701
- compare_set = []
5902
+ compare_set = []
5702
5903
 
5703
- if root is None:
5904
+ if root is None:
5704
5905
 
5705
- root_set = list(self.node_centroids.keys())
5706
- compare_set = root_set
5707
- title = "Nearest Neighbors Between Nodes Heatmap"
5906
+ root_set = list(self.node_centroids.keys())
5907
+ compare_set = root_set
5908
+ title = "Nearest Neighbors Between Nodes Heatmap"
5708
5909
 
5709
- else:
5910
+ else:
5911
+
5912
+ title = f"Nearest Neighbors of ID {targ} from ID {root} Heatmap"
5710
5913
 
5711
- title = f"Nearest Neighbors of ID {targ} from ID {root} Heatmap"
5914
+ for node, iden in self.node_identities.items():
5712
5915
 
5713
- for node, iden in self.node_identities.items():
5916
+ if iden == root:
5714
5917
 
5715
- if iden == root:
5918
+ root_set.append(node)
5716
5919
 
5717
- root_set.append(node)
5920
+ elif (iden == targ) or (targ == 'All Others (Excluding Self)'):
5718
5921
 
5719
- elif (iden == targ) or (targ == 'All Others (Excluding Self)'):
5922
+ compare_set.append(node)
5720
5923
 
5721
- compare_set.append(node)
5924
+ if root == targ:
5722
5925
 
5723
- if root == targ:
5926
+ compare_set = root_set
5927
+ if len(compare_set) - 1 < num:
5928
+
5929
+ num = len(compare_set) - 1
5930
+
5931
+ print(f"Error: Not enough neighbor nodes for requested number of neighbors. Using max available neighbors: {num}")
5932
+
5724
5933
 
5725
- compare_set = root_set
5726
- if len(compare_set) - 1 < num:
5934
+ if len(compare_set) < num:
5727
5935
 
5728
- num = len(compare_set) - 1
5936
+ num = len(compare_set)
5729
5937
 
5730
5938
  print(f"Error: Not enough neighbor nodes for requested number of neighbors. Using max available neighbors: {num}")
5731
-
5732
5939
 
5733
- if len(compare_set) < num:
5940
+ avg, output = proximity.average_nearest_neighbor_distances(self.node_centroids, root_set, compare_set, xy_scale=self.xy_scale, z_scale=self.z_scale, num = num, do_borders = do_borders)
5734
5941
 
5735
- num = len(compare_set)
5942
+ else:
5943
+ if heatmap:
5944
+ root_set = []
5945
+ compare_set = []
5946
+ if root is None:
5736
5947
 
5737
- print(f"Error: Not enough neighbor nodes for requested number of neighbors. Using max available neighbors: {num}")
5738
-
5739
- avg, output = proximity.average_nearest_neighbor_distances(self.node_centroids, root_set, compare_set, xy_scale=self.xy_scale, z_scale=self.z_scale, num = num)
5948
+ root_set = list(self.node_centroids.keys())
5949
+ compare_set = root_set
5950
+ else:
5951
+ for node, iden in self.node_identities.items():
5952
+
5953
+ if iden == root:
5954
+
5955
+ root_set.append(node)
5956
+
5957
+ elif (iden == targ) or (targ == 'All Others (Excluding Self)'):
5958
+
5959
+ compare_set.append(node)
5960
+
5961
+ if root is None:
5962
+ title = "Nearest Neighbors Between Nodes Heatmap"
5963
+ root_set_neigh = approx_boundaries(self.nodes, keep_labels = True)
5964
+ compare_set_neigh = approx_boundaries(self.nodes, keep_labels = False)
5965
+ else:
5966
+ title = f"Nearest Neighbors of ID {targ} from ID {root} Heatmap"
5967
+
5968
+ root_set_neigh = approx_boundaries(self.nodes, [root], self.node_identities, keep_labels = True)
5969
+
5970
+ if targ == 'All Others (Excluding Self)':
5971
+ compare_set_neigh = set(self.node_identities.values())
5972
+ compare_set_neigh.remove(root)
5973
+ targ = compare_set_neigh
5974
+ else:
5975
+ targ = [targ]
5976
+
5977
+ compare_set_neigh = approx_boundaries(self.nodes, targ, self.node_identities, keep_labels = False)
5978
+ avg, output = proximity.average_nearest_neighbor_distances(self.node_centroids, root_set_neigh, compare_set_neigh, xy_scale=self.xy_scale, z_scale=self.z_scale, num = num, do_borders = do_borders)
5740
5979
 
5741
5980
  if quant:
5742
5981
  try:
@@ -5778,8 +6017,7 @@ class Network_3D:
5778
6017
  else:
5779
6018
  is_2d = False
5780
6019
 
5781
- pred = get_theoretical_nearest_neighbor_distance(compare_set, num, volume, is_2d = is_2d)
5782
- #pred = avg
6020
+ pred = distribute_points_uniformly(len(compare_set), bounds, self.z_scale, self.xy_scale, is_2d = is_2d)
5783
6021
 
5784
6022
  node_intensity = {}
5785
6023
  import math