nettracer3d-1.1.0-py3-none-any.whl → nettracer3d-1.2.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of nettracer3d might be problematic.

nettracer3d/nettracer.py CHANGED
@@ -384,6 +384,13 @@ def invert_dict(d):
         inverted.setdefault(value, []).append(key)
     return inverted
 
+def revert_dict(d):
+    inverted = {}
+    for key, value_list in d.items():
+        for value in value_list:
+            inverted[value] = key
+    return inverted
+
 def invert_dict_special(d):
 
     d = invert_dict(d)
@@ -626,6 +633,10 @@ def remove_branches_new(skeleton, length):
     image_copy = (image_copy[1:-1, 1:-1, 1:-1]).astype(np.uint8)
     return image_copy
 
+import numpy as np
+from collections import deque, defaultdict
+
+
 def remove_branches(skeleton, length):
     """Used to compensate for overly-branched skeletons resulting from the scipy 3d skeletonization algorithm"""
 
@@ -737,8 +748,40 @@ def estimate_object_radii(labeled_array, gpu=False, n_jobs=None, xy_scale = 1, z
     else:
         return morphology.estimate_object_radii_cpu(labeled_array, n_jobs, xy_scale = xy_scale, z_scale = z_scale)
 
+def get_surface_areas(labeled, xy_scale=1, z_scale=1):
+    labels = np.unique(labeled)
+    labels = labels[labels > 0]  # Remove background label
+    max_label = int(np.max(labeled))
+
+    # Size array to accommodate highest label value
+    surface_areas = np.zeros(max_label + 1, dtype=np.float64)
+
+    # Check each of 6 face directions (±x, ±y, ±z)
+    for axis in range(3):
+        # Determine face area based on axis (anisotropic scaling)
+        if axis == 2:  # z-axis: face is in xy plane
+            face_area = xy_scale * xy_scale
+        else:  # x or y axis: face is perpendicular to xy plane
+            face_area = xy_scale * z_scale
+
+        for direction in [-1, 1]:
+            # Shift array to compare with neighbors
+            shifted = np.roll(labeled, direction, axis=axis)
+
+            # Find faces exposed to different label (including background)
+            exposed_faces = (labeled != shifted) & (labeled > 0)
+
+            # Count exposed faces per label
+            face_counts = np.bincount(labeled[exposed_faces],
+                                      minlength=max_label + 1)
+            surface_areas += face_counts * face_area
+
+    # Create dictionary mapping label to surface area
+    result = {int(label): float(surface_areas[label]) for label in labels}
+
+    return result
 
-def break_and_label_skeleton(skeleton, peaks = 1, branch_removal = 0, comp_dil = 0, max_vol = 0, directory = None, return_skele = False, nodes = None):
+def break_and_label_skeleton(skeleton, peaks = 1, branch_removal = 0, comp_dil = 0, max_vol = 0, directory = None, return_skele = False, nodes = None, compute = True, unify = False, xy_scale = 1, z_scale = 1):
     """Internal method to break open a skeleton at its branchpoints and label the remaining components, for an 8bit binary array"""
 
     if type(skeleton) == str:
@@ -747,18 +790,28 @@ def break_and_label_skeleton(skeleton, peaks = 1, branch_removal = 0, comp_dil =
     else:
         broken_skele = None
 
-    #old_skeleton = copy.deepcopy(skeleton) # The skeleton might get modified in label_vertices so we can make a preserved copy of it to use later
-
     if nodes is None:
 
-        verts = label_vertices(skeleton, peaks = peaks, branch_removal = branch_removal, comp_dil = comp_dil, max_vol = max_vol, return_skele = return_skele)
+        verts = label_vertices(skeleton, peaks = peaks, branch_removal = branch_removal, comp_dil = comp_dil, max_vol = max_vol, return_skele = return_skele, compute = compute)
 
     else:
         verts = nodes
 
     verts = invert_array(verts)
 
-    #skeleton = old_skeleton
+    """
+    if compute: # We are interested in the endpoints if we are doing the optional computation later
+        endpoints = []
+        image_copy = np.pad(skeleton, pad_width=1, mode='constant', constant_values=0)
+        nonzero_coords = np.transpose(np.nonzero(image_copy))
+        for x, y, z in nonzero_coords:
+            mini = image_copy[x-1:x+2, y-1:y+2, z-1:z+2]
+            nearby_sum = np.sum(mini)
+            threshold = 2 * image_copy[x, y, z]
+
+            if nearby_sum <= threshold:
+                endpoints.append((x, y, z))
+    """
 
     image_copy = skeleton * verts
 
@@ -776,9 +829,147 @@ def break_and_label_skeleton(skeleton, peaks = 1, branch_removal = 0, comp_dil =
         tifffile.imwrite(filename, labeled_image, photometric='minisblack')
         print(f"Broken skeleton saved to {filename}")
 
-    return labeled_image
+    if not unify:
+        verts = None
+    else:
+        verts = invert_array(verts)
+
+    if compute:
+
+        return labeled_image, verts, skeleton, None
+
+    return labeled_image, verts, None, None
+
+def compute_optional_branchstats(verts, labeled_array, endpoints, xy_scale = 1, z_scale = 1):
+
+    #Lengths:
+    # Get all non-background coordinates and their labels in one pass
+    z, y, x = np.where(labeled_array != 0)
+    labels = labeled_array[z, y, x]
+
+    # Sort by label
+    sort_idx = np.argsort(labels)
+    labels_sorted = labels[sort_idx]
+    z_sorted = z[sort_idx]
+    y_sorted = y[sort_idx]
+    x_sorted = x[sort_idx]
+
+    # Find where each label starts
+    unique_labels, split_idx = np.unique(labels_sorted, return_index=True)
+    split_idx = split_idx[1:]  # Remove first index for np.split
+
+    # Split into groups
+    z_split = np.split(z_sorted, split_idx)
+    y_split = np.split(y_sorted, split_idx)
+    x_split = np.split(x_sorted, split_idx)
+
+    # Build dict
+    coords_dict = {label: np.column_stack([z, y, x])
+                   for label, z, y, x in zip(unique_labels, z_split, y_split, x_split)}
+
+    from sklearn.neighbors import NearestNeighbors
+    from scipy.spatial.distance import pdist, squareform
+    len_dict = {}
+    tortuosity_dict = {}
+    angle_dict = {}
+    for label, coords in coords_dict.items():
+        len_dict[label] = morphology.calculate_skeleton_lengths(labeled_array.shape, xy_scale=xy_scale, z_scale=z_scale, skeleton_coords=coords)
+
+        # Find neighbors for all points at once
+        nbrs = NearestNeighbors(radius=1.74, algorithm='kd_tree').fit(coords)
+        neighbor_counts = nbrs.radius_neighbors(coords, return_distance=False)
+        neighbor_counts = np.array([len(n) - 1 for n in neighbor_counts])  # -1 to exclude self
+
+        # Endpoints have exactly 1 neighbor
+        endpoints = coords[neighbor_counts == 1]
+
+        if len(endpoints) > 1:
+            # Scale endpoints
+            scaled_endpoints = endpoints.copy().astype(float)
+            scaled_endpoints[:, 0] *= z_scale  # z dimension
+            scaled_endpoints[:, 1] *= xy_scale  # y dimension
+            scaled_endpoints[:, 2] *= xy_scale  # x dimension
+
+            # calculate distances on scaled coordinates
+            distances = pdist(scaled_endpoints, metric='euclidean')
+            max_distance = distances.max()
+
+            tortuosity_dict[label] = len_dict[label]/max_distance
+
+    for branch, length in len_dict.items():
+        if length == 0:  # This can happen for branches that are 1 pixel which shouldn't have '0' length technically, so we just set them to the length of a pixel
+            len_dict[branch] = xy_scale
+            tortuosity_dict[branch] = 1
 
+    """
+    verts = invert_array(verts)
+    for x, y, z in endpoints:
+        try:
+            verts[z,y,x] = 1
+        except IndexError:
+            print(x, y, z)
+
+    temp_network = Network_3D(nodes = verts, edges = labeled_array, xy_scale = xy_scale, z_scale = z_scale)
+    temp_network.calculate_all(temp_network.nodes, temp_network.edges, xy_scale = temp_network.xy_scale, z_scale = temp_network.z_scale, search = None, diledge = None, inners = False, remove_trunk = 0, ignore_search_region = True, other_nodes = None, label_nodes = True, directory = None, GPU = False, fast_dil = False, skeletonize = False, GPU_downsample = None)
+    temp_network.calculate_node_centroids()
+    from itertools import combinations
+    for node in temp_network.network.nodes:
+        neighbors = list(temp_network.network.neighbors(node))
+
+        # Skip if fewer than 2 neighbors (endpoints or isolated nodes)
+        if len(neighbors) < 2:
+            continue
+
+        # Get all unique pairs of neighbors
+        neighbor_pairs = combinations(neighbors, 2)
+
+        angles = []
+        for neighbor1, neighbor2 in neighbor_pairs:
+            # Get coordinates from centroids
+            point_a = temp_network.node_centroids[neighbor1]
+            point_b = temp_network.node_centroids[node]  # vertex
+            point_c = temp_network.node_centroids[neighbor2]
+
+            # Calculate angle
+            angle_result = calculate_3d_angle(point_a, point_b, point_c, xy_scale = xy_scale, z_scale = z_scale)
+            angles.append(angle_result)
+
+        angle_dict[node] = angles
+    """
+
+    return len_dict, tortuosity_dict, angle_dict
+
+def calculate_3d_angle(point_a, point_b, point_c, xy_scale = 1, z_scale = 1):
+    """Calculate 3D angle at vertex B between points A-B-C."""
+    z1, y1, x1 = point_a
+    z2, y2, x2 = point_b  # vertex
+    z3, y3, x3 = point_c
+
+    # Apply scaling
+    scaled_a = np.array([x1 * xy_scale, y1 * xy_scale, z1 * z_scale])
+    scaled_b = np.array([x2 * xy_scale, y2 * xy_scale, z2 * z_scale])
+    scaled_c = np.array([x3 * xy_scale, y3 * xy_scale, z3 * z_scale])
+
+    # Create vectors from vertex B
+    vec_ba = scaled_a - scaled_b
+    vec_bc = scaled_c - scaled_b
+
+    # Calculate angle using dot product
+    dot_product = np.dot(vec_ba, vec_bc)
+    magnitude_ba = np.linalg.norm(vec_ba)
+    magnitude_bc = np.linalg.norm(vec_bc)
+
+    # Avoid division by zero
+    if magnitude_ba == 0 or magnitude_bc == 0:
+        return {'angle_degrees': 0}
+
+    cos_angle = dot_product / (magnitude_ba * magnitude_bc)
+    cos_angle = np.clip(cos_angle, -1.0, 1.0)  # Handle numerical errors
+
+    angle_radians = np.arccos(cos_angle)
+    angle_degrees = np.degrees(angle_radians)
 
+    return angle_degrees
 
 def threshold(arr, proportion, custom_rad = None):
 
@@ -907,9 +1098,12 @@ def show_3d(arrays_3d=None, arrays_4d=None, down_factor=None, order=0, xy_scale=
         # Downsample arrays if specified
         arrays_3d = [downsample(array, down_factor, order=order) for array in arrays_3d] if arrays_3d is not None else None
         arrays_4d = [downsample(array, down_factor, order=order) for array in arrays_4d] if arrays_4d is not None else None
+        scale = [z_scale * down_factor, xy_scale * down_factor, xy_scale * down_factor]
+    else:
+        scale = [z_scale, xy_scale, xy_scale]
+
 
     viewer = napari.Viewer(ndisplay=3)
-    scale = [z_scale, xy_scale, xy_scale]  # [z, y, x] order for napari
 
     # Add 3D arrays if provided
     if arrays_3d is not None:
@@ -2067,8 +2261,6 @@ def binarize(arrayimage, directory = None):
 
     arrayimage = arrayimage != 0
 
-    arrayimage = arrayimage.astype(np.uint8)
-
     arrayimage = arrayimage * 255
 
     if type(arrayimage) == str:
@@ -2079,7 +2271,7 @@ def binarize(arrayimage, directory = None):
         tifffile.imwrite(f"{directory}/binary.tif", arrayimage)
 
 
-    return arrayimage
+    return arrayimage.astype(np.uint8)
 
 def dilate(arrayimage, amount, xy_scale = 1, z_scale = 1, directory = None, fast_dil = False, recursive = False, dilate_xy = None, dilate_z = None):
     """
@@ -2129,7 +2321,7 @@ def erode(arrayimage, amount, xy_scale = 1, z_scale = 1, mode = 0, preserve_labe
     arrayimage = binarize(arrayimage)
     erode_xy, erode_z = dilation_length_to_pixels(xy_scale, z_scale, amount, amount)
 
-    if mode == 0:
+    if mode == 2:
         arrayimage = (erode_3D(arrayimage, erode_xy, erode_xy, erode_z)) * 255
     else:
         arrayimage = erode_3D_dt(arrayimage, amount, xy_scaling=xy_scale, z_scaling=z_scale, preserve_labels = preserve_labels)
@@ -2184,7 +2376,7 @@ def skeletonize(arrayimage, directory = None):
 
     return arrayimage
 
-def label_branches(array, peaks = 0, branch_removal = 0, comp_dil = 0, max_vol = 0, down_factor = None, directory = None, nodes = None, bonus_array = None, GPU = True, arrayshape = None):
+def label_branches(array, peaks = 0, branch_removal = 0, comp_dil = 0, max_vol = 0, down_factor = None, directory = None, nodes = None, bonus_array = None, GPU = True, arrayshape = None, compute = False, unify = False, union_val = 10, xy_scale = 1, z_scale = 1):
     """
     Can be used to label branches a binary image. Labelled output will be saved to the active directory if none is specified. Note this works better on already thin filaments and may over-divide larger trunkish objects.
     :param array: (Mandatory, string or ndarray) - If string, a path to a tif file to label. Note that the ndarray alternative is for internal use mainly and will not save its output.
@@ -2208,26 +2400,31 @@ def label_branches(array, peaks = 0, branch_removal = 0, comp_dil = 0, max_vol =
     else:
         arrayshape = arrayshape
 
-
     if nodes is None:
 
         array = array > 0
 
         other_array = skeletonize(array)
 
-        other_array = break_and_label_skeleton(other_array, peaks = peaks, branch_removal = branch_removal, comp_dil = comp_dil, max_vol = max_vol, nodes = nodes)
+        other_array, verts, skele, endpoints = break_and_label_skeleton(other_array, peaks = peaks, branch_removal = branch_removal, comp_dil = comp_dil, max_vol = max_vol, nodes = nodes, compute = compute, unify = unify, xy_scale = xy_scale, z_scale = z_scale)
 
     else:
-        array = break_and_label_skeleton(array, peaks = peaks, branch_removal = branch_removal, comp_dil = comp_dil, max_vol = max_vol, nodes = nodes)
+        if down_factor is not None:
+            bonus_array = downsample(bonus_array, down_factor)
+        array, verts, skele, endpoints = break_and_label_skeleton(array, peaks = peaks, branch_removal = branch_removal, comp_dil = comp_dil, max_vol = max_vol, nodes = nodes, compute = compute, unify = unify, xy_scale = xy_scale, z_scale = z_scale)
+
+        if unify is True and nodes is not None:
+            from . import branch_stitcher
+            verts = dilate_3D_old(verts, 3, 3, 3,)
+            verts, _ = label_objects(verts)
+            array = branch_stitcher.trace(bonus_array, array, verts, score_thresh = union_val)
+            verts = None
 
-    if nodes is not None and down_factor is not None:
-        array = upsample_with_padding(array, down_factor, arrayshape)
 
     if nodes is None:
 
         array = smart_dilate.smart_label(array, other_array, GPU = GPU, remove_template = True)
         #distance = smart_dilate.compute_distance_transform_distance(array)
-        print("Watershedding result...")
         #array = water(-distance, other_array, mask=array) #Tried out skimage watershed as shown and found it did not label branches as well as smart_label (esp combined combined with post-processing label splitting if needed)
 
     else:
@@ -2256,8 +2453,11 @@ def label_branches(array, peaks = 0, branch_removal = 0, comp_dil = 0, max_vol =
     else:
         print("Branches labelled")
 
+    if nodes is not None and down_factor is not None:
+        array = upsample_with_padding(array, down_factor, arrayshape)
+
 
-    return array
+    return array, verts, skele, endpoints
 
 def fix_branches_network(array, G, communities, fix_val = None):
 
@@ -2377,7 +2577,7 @@ def label_vertices(array, peaks = 0, branch_removal = 0, comp_dil = 0, max_vol =
     else:
         broken_skele = None
 
-    if down_factor > 0:
+    if down_factor > 1:
         array_shape = array.shape
         array = downsample(array, down_factor, order)
         if order == 3:
@@ -2927,7 +3127,7 @@ class Network_3D:
             for _ in range(weight):
                 lista.append(u)
                 listb.append(v)
-                listc.append(weight)
+                listc.append(0)
 
         self._network_lists = [lista, listb, listc]
 
@@ -3161,7 +3361,14 @@ class Network_3D:
         if directory is None:
             try:
                 if len(self._nodes.shape) == 3:
-                    tifffile.imwrite(f"{filename}", self._nodes, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                    try:
+                        tifffile.imwrite(f"{filename}", self._nodes, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                    except:
+                        try:
+                            tifffile.imwrite(f"{filename}", self._nodes)
+                        except:
+                            self._nodes = binarize(self._nodes)
+                            tifffile.imwrite(f"{filename}", self._nodes, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
                 else:
                     tifffile.imwrite(f"{filename}", self._nodes)
                 print(f"Nodes saved to {filename}")
@@ -3170,9 +3377,16 @@ class Network_3D:
         if directory is not None:
             try:
                 if len(self._nodes.shape) == 3:
-                    tifffile.imwrite(f"{directory}/{filename}", self._nodes, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                    try:
+                        tifffile.imwrite(f"{directory}/{filename}", self._nodes, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                    except:
+                        try:
+                            tifffile.imwrite(f"{directory}/{filename}", self._nodes)
+                        except:
+                            self._nodes = binarize(self._nodes)
+                            tifffile.imwrite(f"{directory}/{filename}", self._nodes, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
                 else:
-                    tifffile.imwrite(f"{directory}/{filename}")
+                    tifffile.imwrite(f"{directory}/{filename}", self._nodes)
                 print(f"Nodes saved to {directory}/{filename}")
             except Exception as e:
                 print(f"Could not save nodes to {directory}")
@@ -3202,11 +3416,25 @@ class Network_3D:
 
         if self._edges is not None:
             if directory is None:
-                tifffile.imwrite(f"{filename}", self._edges, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                try:
+                    tifffile.imwrite(f"{filename}", self._edges, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                except:
+                    try:
+                        tifffile.imwrite(f"{filename}", self._edges)
+                    except:
+                        self._edges = binarize(self._edges)
+                        tifffile.imwrite(f"{filename}", self._edges, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
                 print(f"Edges saved to {filename}")
 
             if directory is not None:
-                tifffile.imwrite(f"{directory}/{filename}", self._edges, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                try:
+                    tifffile.imwrite(f"{directory}/{filename}", self._edges, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                except:
+                    try:
+                        tifffile.imwrite(f"{directory}/{filename}", self._edges)
+                    except:
+                        self._edges = binarize(self._edges)
+                        tifffile.imwrite(f"{directory}/{filename}", self._edges, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
                 print(f"Edges saved to {directory}/{filename}")
 
         if self._edges is None:
@@ -3379,14 +3607,28 @@ class Network_3D:
         if self._network_overlay is not None:
             if directory is None:
                 if len(self._network_overlay.shape) == 3:
-                    tifffile.imwrite(f"{filename}", self._network_overlay, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                    try:
+                        tifffile.imwrite(f"{filename}", self._network_overlay, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                    except:
+                        try:
+                            tifffile.imwrite(f"{filename}", self._network_overlay)
+                        except:
+                            self._network_overlay = binarize(self._network_overlay)
+                            tifffile.imwrite(f"{filename}", self._network_overlay, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
                 else:
                     tifffile.imwrite(f"{filename}", self._network_overlay)
                 print(f"Network overlay saved to {filename}")
 
             if directory is not None:
                 if len(self._network_overlay.shape) == 3:
-                    tifffile.imwrite(f"{directory}/{filename}", self._network_overlay, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                    try:
+                        tifffile.imwrite(f"{directory}/{filename}", self._network_overlay, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                    except:
+                        try:
+                            tifffile.imwrite(f"{directory}/{filename}", self._network_overlay)
+                        except:
+                            self._network_overlay = binarize(self._network_overlay)
+                            tifffile.imwrite(f"{directory}/{filename}", self._network_overlay, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
                 else:
                     tifffile.imwrite(f"{directory}/{filename}", self._network_overlay)
                 print(f"Network overlay saved to {directory}/{filename}")
@@ -3410,14 +3652,28 @@ class Network_3D:
         if self._id_overlay is not None:
             if directory is None:
                 if len(self._id_overlay.shape) == 3:
-                    tifffile.imwrite(f"{filename}", self._id_overlay, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                    try:
+                        tifffile.imwrite(f"{filename}", self._id_overlay, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                    except:
+                        try:
+                            tifffile.imwrite(f"{filename}", self._id_overlay)
+                        except:
+                            self._id_overlay = binarize(self._id_overlay)
+                            tifffile.imwrite(f"{filename}", self._id_overlay, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
                 else:
                     tifffile.imwrite(f"{filename}", self._id_overlay, imagej=True)
                 print(f"Network overlay saved to {filename}")
 
             if directory is not None:
                 if len(self._id_overlay.shape) == 3:
-                    tifffile.imwrite(f"{directory}/{filename}", self._id_overlay, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                    try:
+                        tifffile.imwrite(f"{directory}/{filename}", self._id_overlay, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
+                    except:
+                        try:
+                            tifffile.imwrite(f"{directory}/{filename}", self._id_overlay)
+                        except:
+                            self._id_overlay = binarize(self._id_overlay)
+                            tifffile.imwrite(f"{directory}/{filename}", self._id_overlay, imagej=True, metadata=imagej_metadata, resolution=(resolution_value, resolution_value))
                 else:
                     tifffile.imwrite(f"{directory}/{filename}", self._id_overlay)
                 print(f"ID overlay saved to {directory}/{filename}")
@@ -4153,6 +4409,88 @@ class Network_3D:
         self._network_lists = network_analysis.read_excel_to_lists(df)
         self._network, net_weights = network_analysis.weighted_network(df)
 
+    def create_id_network(self, n=5):
+        import ast
+        import random
+
+        if self.node_identities is None:
+            return
+
+        def invert_dict(d):
+            inverted = {}
+            for key, value in d.items():
+                inverted.setdefault(value, []).append(key)
+            return inverted
+
+        # Invert to get identity -> list of nodes
+        identity_to_nodes = invert_dict(self.node_identities)
+
+        G = nx.Graph()
+        edge_set = set()
+
+        # Step 1: Connect nodes within same exact identity
+        for identity, nodes in identity_to_nodes.items():
+            if len(nodes) <= 1:
+                continue
+
+            # Each node chooses n random neighbors from its identity group
+            for node in nodes:
+                available = [other for other in nodes if other != node]
+                num_to_choose = min(n, len(available))
+                neighbors = random.sample(available, num_to_choose)
+
+                for neighbor in neighbors:
+                    edge = tuple(sorted([node, neighbor]))
+                    edge_set.add(edge)
+
+        # Step 2: For list-like identities, connect across groups with shared sub-identities
+        for identity, nodes in identity_to_nodes.items():
+            if identity.startswith('['):
+                try:
+                    sub_identities = ast.literal_eval(identity)
+
+                    # For each sub-identity in this list-like identity
+                    for sub_id in sub_identities:
+                        # Find all OTHER identity groups that contain this sub-identity
+                        for other_identity, other_nodes in identity_to_nodes.items():
+                            if other_identity == identity:
+                                continue  # Skip connecting to same exact identity (already done in Step 1)
+
+                            # Check if other_identity contains sub_id
+                            contains_sub_id = False
+
+                            if other_identity.startswith('['):
+                                try:
+                                    other_sub_ids = ast.literal_eval(other_identity)
+                                    if sub_id in other_sub_ids:
+                                        contains_sub_id = True
+                                except (ValueError, SyntaxError):
+                                    pass
+                            elif other_identity == sub_id:
+                                # Single identity that matches our sub-identity
+                                contains_sub_id = True
+
+                            if contains_sub_id:
+                                # Each node from current identity connects to n nodes from other_identity
+                                for node in nodes:
+                                    num_to_choose = min(n, len(other_nodes))
+                                    if num_to_choose > 0:
+                                        neighbors = random.sample(other_nodes, num_to_choose)
+
+                                        for neighbor in neighbors:
+                                            edge = tuple(sorted([node, neighbor]))
+                                            edge_set.add(edge)
+
+                except (ValueError, SyntaxError):
+                    pass  # Not a valid list, treat as already handled in Step 1
+
+        G.add_edges_from(edge_set)
+        self.network = G
+
+
+
+
+
     def calculate_all(self, nodes, edges, xy_scale = 1, z_scale = 1, down_factor = None, search = None, diledge = None, inners = True, remove_trunk = 0, ignore_search_region = False, other_nodes = None, label_nodes = True, directory = None, GPU = True, fast_dil = True, skeletonize = False, GPU_downsample = None):
         """
         Method to calculate and save to mem all properties of a Network_3D object. In general, after initializing a Network_3D object, this method should be called on the node and edge masks that will be used to calculate the network.
@@ -5240,7 +5578,7 @@ class Network_3D:
         network_analysis.create_bar_graph(proportion_dict, title2, "Node Identity", "Proportion", directory=directory)
 
         try:
-            network_analysis.create_bar_graph(densities, f'Clustering Factor of Node Identities with {search} from nodes {root}', "Node Identity", "Density Search/Density Total", directory=directory)
+            network_analysis.create_bar_graph(densities, f'Relative Density of Node Identities with {search} from nodes {root}', "Node Identity", "Density Search/Density Total", directory=directory)
        except:
            densities = None
 
@@ -5470,7 +5808,6 @@ class Network_3D:
         else:
             search_x, search_z = dilation_length_to_pixels(self._xy_scale, self._z_scale, search, search)
 
-
         num_nodes = int(np.max(self._nodes))
 
         my_dict = proximity.create_node_dictionary(self._nodes, num_nodes, search_x, search_z, targets = targets, fastdil = fastdil, xy_scale = self._xy_scale, z_scale = self._z_scale, search = search)
@@ -5630,7 +5967,7 @@ class Network_3D:
         neighborhoods.visualize_cluster_composition_umap(self.node_centroids, None, id_dictionary = self.node_identities, graph_label = "Node ID", title = 'UMAP Visualization of Node Centroids')
 
 
-    def identity_umap(self, data):
+    def identity_umap(self, data, mode = 0):
 
         try:
 
@@ -5650,16 +5987,18 @@ class Network_3D:
                 else:
                     del umap_dict[item]
 
-            from scipy.stats import zscore
+            #from scipy.stats import zscore
 
             # Z-score normalize each marker (column)
-            for key in umap_dict:
-                umap_dict[key] = zscore(umap_dict[key])
-
+            #for key in umap_dict:
+                #umap_dict[key] = zscore(umap_dict[key])
 
             from . import neighborhoods
 
-            neighborhoods.visualize_cluster_composition_umap(umap_dict, None, id_dictionary = neighbor_classes, graph_label = "Node ID", title = 'UMAP Visualization of Node Identities by Z-Score')
+            if mode == 0:
+                neighborhoods.visualize_cluster_composition_umap(umap_dict, None, id_dictionary = neighbor_classes, graph_label = "Node ID", title = 'UMAP Visualization of Node Identities by Z-Score')
+            else:
+                neighborhoods.visualize_cluster_composition_umap(umap_dict, None, id_dictionary = neighbor_classes, graph_label = "Node ID", title = 'UMAP Visualization of Node Identities by Z-Score', neighborhoods = self.communities, original_communities = self.communities)
 
         except Exception as e:
             import traceback
@@ -5778,7 +6117,6 @@ class Network_3D:
                         neighbor_group[com] = neighbors[node]
                     except:
                         neighbor_group[com] = 0
-                print(neighbors)
                 neighborhoods.visualize_cluster_composition_umap(umap_dict, id_set, neighborhoods = neighbor_group, original_communities = neighbors)
             elif label == 1:
                 neighborhoods.visualize_cluster_composition_umap(umap_dict, id_set, label = True)
@@ -5791,6 +6129,19 @@ class Network_3D:
         return output, id_set
 
 
+    def group_nodes_by_intensity(self, data, count = None):
+
+        from . import neighborhoods
+
+        clusters = neighborhoods.cluster_arrays(data, count, seed = 42)
+
+        coms = {}
+
+        for i, cluster in enumerate(clusters):
+            coms[i + 1] = cluster
+
+        self.communities = revert_dict(coms)
+
     def assign_neighborhoods(self, seed, count, limit = None, prev_coms = None, proportional = False, mode = 0):
 
         from . import neighborhoods
@@ -5941,13 +6292,6 @@ class Network_3D:
 
     def community_cells(self, size = 32, xy_scale = 1, z_scale = 1):
 
-        def revert_dict(d):
-            inverted = {}
-            for key, value_list in d.items():
-                for value in value_list:
-                    inverted[value] = key
-            return inverted
-
         size_x = int(size * xy_scale)
         size_z = int(size * z_scale)