nettracer3d 1.2.7__py3-none-any.whl → 1.3.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of nettracer3d might be problematic; see the registry's advisory for more details.

nettracer3d/nettracer.py CHANGED
@@ -785,6 +785,147 @@ def get_surface_areas(labeled, xy_scale=1, z_scale=1):
785
785
  result = {int(label): float(surface_areas[label]) for label in labels}
786
786
  return result
787
787
 
788
def get_background_surface_areas(labeled, xy_scale=1, z_scale=1):
    """Return, per label, the total face area exposed to background (value 0).

    Args:
        labeled: 3D integer array of labeled objects; 0 is background.
        xy_scale: physical size of a voxel edge in the xy plane.
        z_scale: physical size of a voxel edge along z.

    Returns:
        dict mapping int label -> float background-exposed surface area.
    """
    present = np.unique(labeled)
    present = present[present > 0]
    top = int(np.max(labeled))

    totals = np.zeros(top + 1, dtype=np.float64)
    foreground = labeled > 0

    def neighbor_with_zero_border(axis, step):
        # Neighbor values at offset `step` along `axis`; voxels whose
        # neighbor falls outside the volume read as background (0).
        out = np.zeros_like(labeled)
        take = [slice(None)] * 3
        put = [slice(None)] * 3
        if step > 0:
            put[axis], take[axis] = slice(1, None), slice(None, -1)
        else:
            put[axis], take[axis] = slice(None, -1), slice(1, None)
        out[tuple(put)] = labeled[tuple(take)]
        return out

    for axis in range(3):
        # Faces crossed along the last axis use the in-plane area; the
        # other two axes use an xy-by-z face (matches get_surface_areas).
        face_area = xy_scale * xy_scale if axis == 2 else xy_scale * z_scale

        for step in (-1, 1):
            # A face counts when a foreground voxel's neighbor is background.
            touching_background = (neighbor_with_zero_border(axis, step) == 0) & foreground
            counts = np.bincount(labeled[touching_background], minlength=top + 1)
            totals += counts * face_area

    return {int(lab): float(totals[lab]) for lab in present}
823
+
824
+
825
def get_background_proportion(labeled, xy_scale=1, z_scale=1):
    """Return, per label, the fraction of its surface area facing background.

    Objects with zero total surface area map to 0.0 rather than dividing
    by zero.
    """
    total = get_surface_areas(labeled, xy_scale, z_scale)
    exposed = get_background_surface_areas(labeled, xy_scale, z_scale)

    return {
        lab: (exposed[lab] / area if area > 0 else 0.0)
        for lab, area in total.items()
    }
838
+
839
def get_perimeters(labeled, xy_scale=1):
    """Return, per label, the total perimeter in a 2D labeled array.

    Accepts a pseudo-3D array with a singleton dimension (z=1); it is
    squeezed to 2D without touching the caller's array. Any edge where a
    labeled pixel meets a different value (another label or background)
    contributes one edge length.
    """
    plane = np.squeeze(labeled)

    present = np.unique(plane)
    present = present[present > 0]
    top = int(np.max(plane))

    totals = np.zeros(top + 1, dtype=np.float64)
    foreground = plane > 0

    def neighbor_with_zero_border(axis, step):
        # Neighbor values at offset `step` along `axis`; out-of-bounds
        # neighbors read as background (0).
        out = np.zeros_like(plane)
        take = [slice(None)] * 2
        put = [slice(None)] * 2
        if step > 0:
            put[axis], take[axis] = slice(1, None), slice(None, -1)
        else:
            put[axis], take[axis] = slice(None, -1), slice(1, None)
        out[tuple(put)] = plane[tuple(take)]
        return out

    for axis in range(2):
        for step in (-1, 1):
            # Any mismatch with the neighbor is a perimeter edge.
            boundary = (plane != neighbor_with_zero_border(axis, step)) & foreground
            totals += np.bincount(plane[boundary], minlength=top + 1) * xy_scale

    return {int(lab): float(totals[lab]) for lab in present}
875
+
876
+
877
def get_background_perimeters(labeled, xy_scale=1):
    """Return, per label, the perimeter touching background (value 0) in 2D.

    Accepts a pseudo-3D array with a singleton dimension (z=1); it is
    squeezed to 2D without touching the caller's array. Unlike
    get_perimeters, edges shared with OTHER labels are not counted —
    only edges whose neighbor is background.
    """
    plane = np.squeeze(labeled)

    present = np.unique(plane)
    present = present[present > 0]
    top = int(np.max(plane))

    totals = np.zeros(top + 1, dtype=np.float64)
    foreground = plane > 0

    def neighbor_with_zero_border(axis, step):
        # Neighbor values at offset `step` along `axis`; out-of-bounds
        # neighbors read as background (0).
        out = np.zeros_like(plane)
        take = [slice(None)] * 2
        put = [slice(None)] * 2
        if step > 0:
            put[axis], take[axis] = slice(1, None), slice(None, -1)
        else:
            put[axis], take[axis] = slice(None, -1), slice(1, None)
        out[tuple(put)] = plane[tuple(take)]
        return out

    for axis in range(2):
        for step in (-1, 1):
            # An edge counts only when the neighbor is background.
            exposed = (neighbor_with_zero_border(axis, step) == 0) & foreground
            totals += np.bincount(plane[exposed], minlength=top + 1) * xy_scale

    return {int(lab): float(totals[lab]) for lab in present}
913
+
914
+
915
def get_background_perimeter_proportion(labeled, xy_scale=1):
    """Return, per label, the fraction of its perimeter facing background (2D).

    Objects with zero total perimeter map to 0.0 rather than dividing
    by zero.
    """
    total = get_perimeters(labeled, xy_scale)
    exposed = get_background_perimeters(labeled, xy_scale)

    return {
        lab: (exposed[lab] / length if length > 0 else 0.0)
        for lab, length in total.items()
    }
928
+
788
929
  def break_and_label_skeleton(skeleton, peaks = 1, branch_removal = 0, comp_dil = 0, max_vol = 0, directory = None, return_skele = False, nodes = None, compute = True, unify = False, xy_scale = 1, z_scale = 1):
789
930
  """Internal method to break open a skeleton at its branchpoints and label the remaining components, for an 8bit binary array"""
790
931
 
@@ -1174,21 +1315,29 @@ def z_project(array3d, method='max'):
1174
1315
  Returns:
1175
1316
  numpy.ndarray: 2D projected array with shape (Y, X)
1176
1317
  """
1177
- if not isinstance(array3d, np.ndarray) or array3d.ndim != 3:
1178
- raise ValueError("Input must be a 3D numpy array")
1179
-
1180
- if method == 'max':
1181
- return np.max(array3d, axis=0)
1182
- elif method == 'mean':
1183
- return np.mean(array3d, axis=0)
1184
- elif method == 'min':
1185
- return np.min(array3d, axis=0)
1186
- elif method == 'sum':
1187
- return np.sum(array3d, axis=0)
1188
- elif method == 'std':
1189
- return np.std(array3d, axis=0)
1318
+ #if not isinstance(array3d, np.ndarray):
1319
+ # raise ValueError("Input must be a 3D numpy array")
1320
+
1321
+
1322
+ if len(array3d.shape) == 3:
1323
+ if method == 'max':
1324
+ return np.max(array3d, axis=0)
1325
+ elif method == 'mean':
1326
+ return np.mean(array3d, axis=0)
1327
+ elif method == 'min':
1328
+ return np.min(array3d, axis=0)
1329
+ elif method == 'sum':
1330
+ return np.sum(array3d, axis=0)
1331
+ elif method == 'std':
1332
+ return np.std(array3d, axis=0)
1333
+ else:
1334
+ raise ValueError("Method must be one of: 'max', 'mean', 'min', 'sum', 'std'")
1190
1335
  else:
1191
- raise ValueError("Method must be one of: 'max', 'mean', 'min', 'sum', 'std'")
1336
+ array_list = []
1337
+ for i in range(array3d.shape[-1]):
1338
+ array_list.append(z_project(array3d[:, :, :, i], method = method))
1339
+ return np.stack(array_list, axis=-1)
1340
+
1192
1341
 
1193
1342
  def fill_holes_3d(array, head_on = False, fill_borders = True):
1194
1343
  def process_slice(slice_2d, border_threshold=0.08, fill_borders = True):
@@ -1490,24 +1639,6 @@ def hash_inners(search_region, inner_edges, GPU = False):
1490
1639
 
1491
1640
  return inner_edges
1492
1641
 
1493
- def hash_inners_old(search_region, inner_edges, GPU = True):
1494
- """Internal method used to help sort out inner edge connections. The inner edges of the array will not differentiate between what nodes they contact if those nodes themselves directly touch each other.
1495
- This method allows these elements to be efficiently seperated from each other. Originally this was implemented using the gaussian blur because i didn't yet realize skimage could do the same more efficiently."""
1496
-
1497
- print("Performing gaussian blur to hash inner edges.")
1498
-
1499
- blurred_search = smart_dilate.gaussian(search_region, GPU = GPU)
1500
-
1501
- borders = binarize((blurred_search - search_region)) #By subtracting the original image from the guassian blurred version, we set all non-border regions to 0
1502
-
1503
- del blurred_search
1504
-
1505
- inner_edges = inner_edges * borders #And as a result, we can mask out only 'inner edges' that themselves exist within borders
1506
-
1507
- inner_edges = dilate_3D_old(inner_edges, 3, 3, 3) #Not sure if dilating is necessary. Want to ensure that the inner edge pieces still overlap with the proper nodes after the masking.
1508
-
1509
- return inner_edges
1510
-
1511
1642
 
1512
1643
  def dilate_2D(array, search, scaling = 1):
1513
1644
 
@@ -2167,6 +2298,51 @@ def binarize(arrayimage, directory = None):
2167
2298
 
2168
2299
  return arrayimage.astype(np.uint8)
2169
2300
 
2301
def convert_to_multigraph(G, weight_attr='weight'):
    """
    Convert a weighted graph to a MultiGraph by creating parallel edges.

    Args:
        G: NetworkX Graph (or DiGraph) whose edge weights represent
           edge multiplicity
        weight_attr: Name of the weight attribute (default: 'weight')

    Returns:
        MultiGraph (MultiDiGraph for directed inputs) with parallel edges
        instead of weights

    Note:
        - Weights are rounded to integers; weights below 1 (or missing)
          yield a single edge
        - Original node attributes are preserved; original edge attributes
          (except the weight) are preserved on the first parallel edge only
    """
    # Honor directedness so directed inputs do not silently lose
    # edge direction (previously always produced an undirected MultiGraph).
    MG = nx.MultiDiGraph() if G.is_directed() else nx.MultiGraph()

    # Copy nodes with all their attributes.
    MG.add_nodes_from(G.nodes(data=True))

    # Convert each weighted edge into `weight` parallel edges.
    for u, v, data in G.edges(data=True):
        # Weight defaults to 1 when the attribute is missing.
        weight = data.get(weight_attr, 1)

        # Round to an integer number of parallel edges, at least one.
        num_edges = max(1, int(round(weight)))

        # First edge carries all original attributes except the weight;
        # the remaining parallel copies are bare.
        first_data = {k: val for k, val in data.items() if k != weight_attr}
        MG.add_edge(u, v, **first_data)
        for _ in range(num_edges - 1):
            MG.add_edge(u, v)

    return MG
2345
+
2170
2346
  def dilate(arrayimage, amount, xy_scale = 1, z_scale = 1, directory = None, fast_dil = False, recursive = False, dilate_xy = None, dilate_z = None):
2171
2347
  """
2172
2348
  Can be used to dilate a binary image in 3D. Dilated output will be saved to the active directory if none is specified. Note that dilation is done with single-instance kernels and not iterations, and therefore
@@ -2392,7 +2568,7 @@ def fix_branches_network(array, G, communities, fix_val = None):
2392
2568
 
2393
2569
  return targs
2394
2570
 
2395
- def fix_branches(array, G, max_val):
2571
+ def fix_branches(array, G, max_val, consider_prop = True):
2396
2572
  """
2397
2573
  Parameters:
2398
2574
  array: numpy array containing the labeled regions
@@ -2416,8 +2592,29 @@ def fix_branches(array, G, max_val):
2416
2592
 
2417
2593
  # Find all neighbors of not_safe nodes in one pass
2418
2594
  neighbors_of_not_safe = set()
2419
- for node in not_safe_initial:
2420
- neighbors_of_not_safe.update(adj[node])
2595
+ if consider_prop:
2596
+ if array.shape[0] != 1:
2597
+ areas = get_background_proportion(array, xy_scale=1, z_scale=1)
2598
+ else:
2599
+ areas = get_background_perimeter_proportion(array, xy_scale=1)
2600
+ valid_areas = {label: proportion for label, proportion in areas.items() if proportion < 0.4}
2601
+
2602
+ for node in not_safe_initial:
2603
+ # Filter neighbors based on whether they're in the valid areas dict
2604
+ valid_neighbors = [neighbor for neighbor in adj[node] if neighbor in valid_areas]
2605
+
2606
+ # If no valid neighbors, fall back to the one with lowest proportion
2607
+ if not valid_neighbors:
2608
+ node_neighbors = list(adj[node])
2609
+ if node_neighbors:
2610
+ # Find neighbor with minimum background proportion
2611
+ min_neighbor = min(node_neighbors, key=lambda n: areas.get(n, float('inf')))
2612
+ valid_neighbors = [min_neighbor]
2613
+
2614
+ neighbors_of_not_safe.update(valid_neighbors)
2615
+ else:
2616
+ for node in not_safe_initial:
2617
+ neighbors_of_not_safe.update(adj[node])
2421
2618
 
2422
2619
  # Remove max_val if present
2423
2620
  neighbors_of_not_safe.discard(max_val)
@@ -2428,7 +2625,7 @@ def fix_branches(array, G, max_val):
2428
2625
  # Update sets
2429
2626
  not_safe = not_safe_initial | nodes_to_move
2430
2627
 
2431
- # The rest of the function - FIX STARTS HERE
2628
+ # The rest of the function
2432
2629
  targs = np.array(list(not_safe))
2433
2630
 
2434
2631
  if len(targs) == 0:
@@ -2441,18 +2638,12 @@ def fix_branches(array, G, max_val):
2441
2638
  # Get the current maximum label in the array to avoid collisions
2442
2639
  current_max = np.max(array)
2443
2640
 
2444
- # Assign new unique labels to each connected component
2445
- for component_id in range(1, num_components + 1):
2446
- component_mask = labeled == component_id
2447
- array[component_mask] = current_max + component_id
2641
+ # Vectorized relabeling - single operation instead of loop
2642
+ array[mask] = labeled[mask] + current_max
2448
2643
 
2449
2644
  return array
2450
2645
 
2451
2646
 
2452
-
2453
-
2454
-
2455
-
2456
2647
  def label_vertices(array, peaks = 0, branch_removal = 0, comp_dil = 0, max_vol = 0, down_factor = 0, directory = None, return_skele = False, order = 0, fastdil = True):
2457
2648
  """
2458
2649
  Can be used to label vertices (where multiple branches connect) a binary image. Labelled output will be saved to the active directory if none is specified. Note this works better on already thin filaments and may over-divide larger trunkish objects.
@@ -2487,7 +2678,7 @@ def label_vertices(array, peaks = 0, branch_removal = 0, comp_dil = 0, max_vol =
2487
2678
  old_skeleton = copy.deepcopy(array) # The skeleton might get modified in label_vertices so we can make a preserved copy of it to use later
2488
2679
 
2489
2680
  if branch_removal > 0:
2490
- array = remove_branches(array, branch_removal)
2681
+ array = remove_branches_new(array, branch_removal)
2491
2682
 
2492
2683
  array = np.pad(array, pad_width=1, mode='constant', constant_values=0)
2493
2684
 
@@ -4105,27 +4296,30 @@ class Network_3D:
4105
4296
  else:
4106
4297
  outer_edges = dilate_3D_old(outer_edges)
4107
4298
 
4108
- labelled_edges, num_edge = label_objects(outer_edges)
4299
+ #labelled_edges, num_edge = ndimage.label(outer_edges)
4109
4300
 
4110
- if inners:
4111
- inner_edges = hash_inners(self._search_region, binary_edges, GPU = GPU)
4301
+ inner_edges = hash_inners(self._search_region, binary_edges, GPU = GPU)
4112
4302
 
4113
- del binary_edges
4303
+ del binary_edges
4304
+
4305
+ outer_edges = (inner_edges > 0) | (outer_edges > 0)
4306
+
4307
+ #inner_labels, num_edge = ndimage.label(inner_edges)
4114
4308
 
4115
- inner_labels, num_edge = label_objects(inner_edges)
4309
+ del inner_edges
4116
4310
 
4117
- del inner_edges
4311
+ outer_edges, num_edge = ndimage.label(outer_edges)
4118
4312
 
4119
- labelled_edges = combine_edges(labelled_edges, inner_labels)
4313
+ #labelled_edges = combine_edges(labelled_edges, inner_labels)
4120
4314
 
4121
- num_edge = np.max(labelled_edges)
4315
+ #num_edge = np.max(labelled_edges)
4122
4316
 
4123
- if num_edge < 256:
4124
- labelled_edges = labelled_edges.astype(np.uint8)
4125
- elif num_edge < 65536:
4126
- labelled_edges = labelled_edges.astype(np.uint16)
4317
+ #if num_edge < 256:
4318
+ # labelled_edges = labelled_edges.astype(np.uint8)
4319
+ #elif num_edge < 65536:
4320
+ # labelled_edges = labelled_edges.astype(np.uint16)
4127
4321
 
4128
- self._edges = labelled_edges
4322
+ self._edges = outer_edges
4129
4323
 
4130
4324
  def label_nodes(self):
4131
4325
  """
@@ -4135,21 +4329,37 @@ class Network_3D:
4135
4329
  self._nodes, num_nodes = label_objects(nodes, structure_3d)
4136
4330
 
4137
4331
  def combine_nodes(self, root_nodes, other_nodes, other_ID, identity_dict, root_ID = None, centroids = False, down_factor = None):
4138
-
4139
4332
  """Internal method to merge two labelled node arrays into one"""
4140
-
4141
4333
  print("Combining node arrays")
4142
-
4334
+
4335
+ # Calculate the maximum value that will exist in the output
4336
+ max_root = np.max(root_nodes)
4337
+ max_other = np.max(other_nodes)
4338
+ max_output = max_root + max_other # Worst case: all other_nodes shifted by max_root
4339
+
4340
+ # Determine the minimum dtype needed
4341
+ if max_output <= 255:
4342
+ target_dtype = np.uint8
4343
+ elif max_output <= 65535:
4344
+ target_dtype = np.uint16
4345
+ else:
4346
+ target_dtype = np.uint32
4347
+
4348
+ # Convert arrays to appropriate dtype
4349
+ root_nodes = root_nodes.astype(target_dtype)
4350
+ other_nodes = other_nodes.astype(target_dtype)
4351
+
4352
+ # Now perform the merge
4143
4353
  mask = (root_nodes == 0) & (other_nodes > 0)
4144
4354
  if np.any(mask):
4145
- max_val = np.max(root_nodes)
4146
- other_nodes[:] = np.where(mask, other_nodes + max_val, 0)
4147
- if centroids:
4148
- new_dict = network_analysis._find_centroids(other_nodes, down_factor = down_factor)
4149
- if down_factor is not None:
4150
- for item in new_dict:
4151
- new_dict[item] = down_factor * new_dict[item]
4152
- self.node_centroids.update(new_dict)
4355
+ other_nodes_shifted = np.where(other_nodes > 0, other_nodes + max_root, 0)
4356
+ if centroids:
4357
+ new_dict = network_analysis._find_centroids(other_nodes_shifted, down_factor = down_factor)
4358
+ if down_factor is not None:
4359
+ for item in new_dict:
4360
+ new_dict[item] = down_factor * new_dict[item]
4361
+ self.node_centroids.update(new_dict)
4362
+ other_nodes = np.where(mask, other_nodes_shifted, 0)
4153
4363
 
4154
4364
  if root_ID is not None:
4155
4365
  rootIDs = list(np.unique(root_nodes)) #Sets up adding these vals to the identitiy dictionary. Gets skipped if this has already been done.
@@ -4188,7 +4398,7 @@ class Network_3D:
4188
4398
 
4189
4399
  return nodes, identity_dict
4190
4400
 
4191
- def merge_nodes(self, addn_nodes_name, label_nodes = True, root_id = "Root_Nodes", centroids = False, down_factor = None):
4401
+ def merge_nodes(self, addn_nodes_name, label_nodes = True, root_id = "Root_Nodes", centroids = False, down_factor = None, is_array = False):
4192
4402
  """
4193
4403
  Merges the self._nodes attribute with alternate labelled node images. The alternate nodes can be inputted as a string for a filepath to a tif,
4194
4404
  or as a directory address containing only tif images, which will merge the _nodes attribute with all tifs in the folder. The _node_identities attribute
@@ -4215,7 +4425,11 @@ class Network_3D:
4215
4425
  self.node_centroids[item] = down_factor * self.node_centroids[item]
4216
4426
 
4217
4427
  try: #Try presumes the input is a tif
4218
- addn_nodes = tifffile.imread(addn_nodes_name) #If not this will fail and activate the except block
4428
+ if not is_array:
4429
+ addn_nodes = tifffile.imread(addn_nodes_name) #If not this will fail and activate the except block
4430
+ else:
4431
+ addn_nodes = addn_nodes_name # Passing it an array directly
4432
+ addn_nodes_name = "Node"
4219
4433
 
4220
4434
  if label_nodes is True:
4221
4435
  addn_nodes, num_nodes2 = label_objects(addn_nodes) # Label the node objects. Note this presumes no overlap between node masks.
@@ -4227,7 +4441,6 @@ class Network_3D:
4227
4441
  num_nodes = int(np.max(node_labels))
4228
4442
 
4229
4443
  except: #Exception presumes the input is a directory containing multiple tifs, to allow multi-node stackage.
4230
-
4231
4444
  addn_nodes_list = directory_info(addn_nodes_name)
4232
4445
 
4233
4446
  for i, addn_nodes in enumerate(addn_nodes_list):
@@ -4573,7 +4786,7 @@ class Network_3D:
4573
4786
  Sets the communities attribute by splitting the network into communities
4574
4787
  """
4575
4788
 
4576
- self._communities, self.normalized_weights, stats = modularity.community_partition(self._network_lists, weighted = weighted, style = style, dostats = dostats, seed = seed)
4789
+ self._communities, self.normalized_weights, stats = modularity.community_partition(self._network, weighted = weighted, style = style, dostats = dostats, seed = seed)
4577
4790
 
4578
4791
  return stats
4579
4792
 
@@ -4589,6 +4802,8 @@ class Network_3D:
4589
4802
  self._network = network_analysis.open_network(self._network_lists)
4590
4803
 
4591
4804
 
4805
+
4806
+
4592
4807
  def rescale(self, array, directory = None):
4593
4808
  """
4594
4809
  Scale a downsampled overlay or extracted image object back to the size that is present in either a Network_3D's node or edge properties.
@@ -4882,14 +5097,14 @@ class Network_3D:
4882
5097
 
4883
5098
 
4884
5099
 
4885
- def prune_samenode_connections(self):
5100
+ def prune_samenode_connections(self, target = None):
4886
5101
  """
4887
5102
  If working with a network that has multiple node identities (from merging nodes or otherwise manipulating this property),
4888
5103
  this method will remove from the network and network_lists properties any connections that exist between the same node identity,
4889
5104
  in case we want to investigate only connections between differing objects.
4890
5105
  """
4891
5106
 
4892
- self._network_lists, self._node_identities = network_analysis.prune_samenode_connections(self._network_lists, self._node_identities)
5107
+ self._network_lists, self._node_identities = network_analysis.prune_samenode_connections(self._network_lists, self._node_identities, target = target)
4893
5108
  self._network, num_weights = network_analysis.weighted_network(self._network_lists)
4894
5109
 
4895
5110
 
@@ -5355,7 +5570,8 @@ class Network_3D:
5355
5570
  Returns:
5356
5571
  dict: Dictionary containing various network statistics
5357
5572
  """
5358
- G = self._network
5573
+ G_unweighted = self._network
5574
+ G = convert_to_multigraph(self._network)
5359
5575
  stats = {}
5360
5576
 
5361
5577
  # Basic graph properties
@@ -5386,13 +5602,13 @@ class Network_3D:
5386
5602
  try:
5387
5603
  stats['avg_betweenness_centrality'] = np.mean(list(nx.betweenness_centrality(G).values()))
5388
5604
  stats['avg_closeness_centrality'] = np.mean(list(nx.closeness_centrality(G).values()))
5389
- stats['avg_eigenvector_centrality'] = np.mean(list(nx.eigenvector_centrality(G, max_iter=1000).values()))
5605
+ stats['avg_eigenvector_centrality'] = np.mean(list(nx.eigenvector_centrality(G_unweighted, max_iter=1000).values()))
5390
5606
  except:
5391
5607
  stats['centrality_measures'] = "Failed to compute - graph might be too large or disconnected"
5392
5608
 
5393
5609
  # Clustering and transitivity
5394
- stats['avg_clustering_coefficient'] = nx.average_clustering(G)
5395
- stats['transitivity'] = nx.transitivity(G)
5610
+ stats['avg_clustering_coefficient'] = nx.average_clustering(G_unweighted)
5611
+ stats['transitivity'] = nx.transitivity(G_unweighted)
5396
5612
 
5397
5613
  # Path lengths
5398
5614
  if nx.is_connected(G):
@@ -5608,7 +5824,7 @@ class Network_3D:
5608
5824
  print(f"Using {volume} for the volume measurement (Volume of provided mask as scaled by xy and z scaling)")
5609
5825
 
5610
5826
  # Compute distance transform on padded array
5611
- legal = smart_dilate.compute_distance_transform_distance(legal, sampling = [self.z_scale, self.xy_scale, self.xy_scale])
5827
+ legal = smart_dilate.compute_distance_transform_distance(legal, sampling = [self.z_scale, self.xy_scale, self.xy_scale], fast_dil = True)
5612
5828
 
5613
5829
  # Remove padding after distance transform
5614
5830
  if dim == 2:
@@ -5702,7 +5918,6 @@ class Network_3D:
5702
5918
 
5703
5919
 
5704
5920
  def morph_proximity(self, search = 0, targets = None, fastdil = False):
5705
-
5706
5921
  if type(search) == list:
5707
5922
  search_x, search_z = search #Suppose we just want to directly pass these params
5708
5923
  else:
@@ -5711,7 +5926,6 @@ class Network_3D:
5711
5926
  num_nodes = int(np.max(self._nodes))
5712
5927
 
5713
5928
  my_dict = proximity.create_node_dictionary(self._nodes, num_nodes, search_x, search_z, targets = targets, fastdil = fastdil, xy_scale = self._xy_scale, z_scale = self._z_scale, search = search)
5714
-
5715
5929
  my_dict = proximity.find_shared_value_pairs(my_dict)
5716
5930
 
5717
5931
  my_dict = create_and_save_dataframe(my_dict)