nettracer3d 0.9.3__py3-none-any.whl → 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of nettracer3d might be problematic; see the registry's advisory page for more details.

nettracer3d/nettracer.py CHANGED
@@ -4,6 +4,7 @@ import tifffile
4
4
  from scipy import ndimage
5
5
  from skimage import measure
6
6
  import cv2
7
+ import ast
7
8
  import concurrent.futures
8
9
  from concurrent.futures import ThreadPoolExecutor, as_completed
9
10
  from scipy.ndimage import zoom
@@ -382,6 +383,27 @@ def invert_dict(d):
382
383
  inverted.setdefault(value, []).append(key)
383
384
  return inverted
384
385
 
386
def invert_dict_special(d):
    """Invert a dict and flatten composite identity keys into their members.

    First inverts ``d`` via ``invert_dict`` (value -> list of keys). Any
    resulting key that is the string repr of a Python literal collection
    (e.g. ``"['A+', 'B-']"``, as produced by ``merge_node_ids``) is expanded:
    each member identity inherits the composite key's node list, and the
    composite entry itself is removed. Plain identity keys pass through
    unchanged.

    :param d: Mapping to invert (typically node -> identity-string).
    :returns: Inverted dict with composite identity keys flattened.
    """
    d = invert_dict(d)
    new_dict = copy.deepcopy(d)

    for key, vals in d.items():
        try:
            # Only string reprs of literals parse; plain identities raise.
            idens = ast.literal_eval(key)
        except (ValueError, SyntaxError, TypeError):
            continue  # ordinary identity key: keep entry as-is
        try:
            for iden in idens:
                if iden in new_dict:
                    new_dict[iden].extend(vals)
                else:
                    # Copy the list: assigning `vals` directly would alias the
                    # source dict's list, so a later extend on this identity
                    # would silently mutate (double-count) the source data.
                    new_dict[iden] = list(vals)
            del new_dict[key]
        except TypeError:
            # Literal was not iterable (e.g. a bare number) -- leave entry.
            pass
    return new_dict
405
+
406
+
385
407
  def invert_array(array):
386
408
  """Internal method used to flip node array indices. 0 becomes 255 and vice versa."""
387
409
  inverted_array = np.where(array == 0, 255, 0).astype(np.uint8)
@@ -1245,31 +1267,6 @@ def dilate_2D(array, search, scaling = 1):
1245
1267
 
1246
1268
  return inv
1247
1269
 
1248
- def erode_2D(array, search, scaling=1):
1249
- """
1250
- Erode a 2D array using distance transform method.
1251
-
1252
- Parameters:
1253
- array -- Input 2D binary array
1254
- search -- Distance within which to erode
1255
- scaling -- Scaling factor (default: 1)
1256
-
1257
- Returns:
1258
- Eroded 2D array
1259
- """
1260
- # For erosion, we work directly with the foreground
1261
- # No need to invert the array
1262
-
1263
- # Compute distance transform on the foreground
1264
- dt = smart_dilate.compute_distance_transform_distance(array)
1265
-
1266
- # Apply scaling
1267
- dt = dt * scaling
1268
-
1269
- # Threshold to keep only points that are at least 'search' distance from the boundary
1270
- eroded = dt >= search
1271
-
1272
- return eroded
1273
1270
 
1274
1271
  def dilate_3D_dt(array, search_distance, xy_scaling=1.0, z_scaling=1.0):
1275
1272
  """
@@ -1331,7 +1328,42 @@ def dilate_3D_dt(array, search_distance, xy_scaling=1.0, z_scaling=1.0):
1331
1328
 
1332
1329
  return inv.astype(np.uint8)
1333
1330
 
1334
- def erode_3D_dt(array, search_distance, xy_scaling=1.0, z_scaling=1.0):
1331
def erode_2D(array, search, scaling=1, preserve_labels = False):
    """
    Erode a 2D array using distance transform method.

    Parameters:
    array -- Input 2D binary (or labelled, when preserve_labels) array
    search -- Distance within which to erode
    scaling -- Scaling factor (default: 1)
    preserve_labels -- If True, erode labelled objects while keeping their
        label values instead of returning a boolean mask.

    Returns:
    Eroded 2D array (boolean when preserve_labels is False, labelled otherwise)
    """
    # For erosion, we work directly with the foreground
    # No need to invert the array

    if preserve_labels:
        from skimage.segmentation import find_boundaries
        # Zero out the thick boundaries between touching labels first, so the
        # distance transform measures from each object's own border rather
        # than treating adjacent labels as one connected foreground.
        borders = find_boundaries(array, mode='thick')
        # invert_array yields 0/255 -- assumes the distance transform only
        # distinguishes zero vs nonzero foreground; TODO confirm.
        mask = array * invert_array(borders)
        mask = smart_dilate.compute_distance_transform_distance(mask)
        mask = mask * scaling
        # NOTE(review): this branch keeps voxels exactly at 'search' (>=)
        # while the binary branch below drops them (>) -- confirm the
        # asymmetry is intentional.
        mask = mask >= search
        # Reapply the original label values over the surviving mask.
        array = mask * array
    else:
        # Compute distance transform on the foreground
        dt = smart_dilate.compute_distance_transform_distance(array)

        # Apply scaling
        dt = dt * scaling

        # Threshold to keep only points that are at least 'search' distance from the boundary
        array = dt > search

    return array
1365
+
1366
+ def erode_3D_dt(array, search_distance, xy_scaling=1.0, z_scaling=1.0, preserve_labels = False):
1335
1367
  """
1336
1368
  Erode a 3D array using distance transform method. DT erosion produces perfect results
1337
1369
  with Euclidean geometry, but may be slower for large arrays.
@@ -1349,43 +1381,24 @@ def erode_3D_dt(array, search_distance, xy_scaling=1.0, z_scaling=1.0):
1349
1381
 
1350
1382
  if array.shape[0] == 1:
1351
1383
  # Handle 2D case
1352
- return erode_2D(array, search_distance, scaling=xy_scaling)
1353
-
1354
- # For erosion, we work directly with the foreground (no inversion needed)
1384
+ return erode_2D(array, search_distance, scaling=xy_scaling, preserve_labels = True)
1355
1385
 
1356
- """
1357
- # Determine which dimension needs resampling
1358
- if (z_scaling > xy_scaling):
1359
- # Z dimension needs to be stretched
1360
- zoom_factor = [z_scaling/xy_scaling, 1, 1] # Scale factor for [z, y, x]
1361
- rev_factor = [xy_scaling/z_scaling, 1, 1]
1362
- cardinal = xy_scaling
1363
- elif (xy_scaling > z_scaling):
1364
- # XY dimensions need to be stretched
1365
- zoom_factor = [1, xy_scaling/z_scaling, xy_scaling/z_scaling] # Scale factor for [z, y, x]
1366
- rev_factor = [1, z_scaling/xy_scaling, z_scaling/xy_scaling] # Scale factor for [z, y, x]
1367
- cardinal = z_scaling
1368
- else:
1369
- # Already uniform scaling, no need to resample
1370
- zoom_factor = None
1371
- rev_factor = None
1372
- cardinal = xy_scaling
1373
-
1374
- # Resample the mask if needed
1375
- if zoom_factor:
1376
- array = ndimage.zoom(array, zoom_factor, order=0) # Use order=0 for binary masks
1377
- """
1378
-
1379
- print("Computing a distance transform for a perfect erosion...")
1380
1386
 
1381
- array = smart_dilate.compute_distance_transform_distance(array, sampling = [z_scaling, xy_scaling, xy_scaling])
1382
-
1383
- # Apply scaling factor
1384
- #array = array * cardinal
1385
-
1386
- # Threshold the distance transform to get eroded result
1387
- # For erosion, we keep only the points that are at least search_distance from the boundary
1388
- array = array >= search_distance
1387
+ if preserve_labels:
1388
+
1389
+
1390
+ from skimage.segmentation import find_boundaries
1391
+
1392
+ borders = find_boundaries(array, mode='thick')
1393
+ mask = array * invert_array(borders)
1394
+ mask = smart_dilate.compute_distance_transform_distance(mask, sampling = [z_scaling, xy_scaling, xy_scaling])
1395
+ mask = mask >= search_distance
1396
+ array = mask * array
1397
+ else:
1398
+ array = smart_dilate.compute_distance_transform_distance(array, sampling = [z_scaling, xy_scaling, xy_scaling])
1399
+ # Threshold the distance transform to get eroded result
1400
+ # For erosion, we keep only the points that are at least search_distance from the boundary
1401
+ array = array > search_distance
1389
1402
 
1390
1403
  # Resample back to original dimensions if needed
1391
1404
  #if rev_factor:
@@ -1552,7 +1565,7 @@ def dilate_3D_old(tiff_array, dilated_x=3, dilated_y=3, dilated_z=3):
1552
1565
 
1553
1566
 
1554
1567
  def erode_3D(tiff_array, eroded_x, eroded_y, eroded_z):
1555
- """Internal method to erode an array in 3D. Erosion this way is much faster than using a distance transform although the latter is theoretically more accurate.
1568
+ """Internal method to erode an array in 3D. Erosion this way is faster than using a distance transform although the latter is theoretically more accurate.
1556
1569
  Arguments are an array, and the desired pixel erosion amounts in X, Y, Z."""
1557
1570
 
1558
1571
  if tiff_array.shape[0] == 1:
@@ -2102,22 +2115,34 @@ def dilate(arrayimage, amount, xy_scale = 1, z_scale = 1, directory = None, fast
2102
2115
 
2103
2116
  return arrayimage
2104
2117
 
2105
- def erode(arrayimage, amount, xy_scale = 1, z_scale = 1, mode = 0):
2106
- if len(np.unique(arrayimage)) > 2: #binarize
2118
def erode(arrayimage, amount, xy_scale = 1, z_scale = 1, mode = 0, preserve_labels = False):
    """Erode a mask by a physical distance, dispatching to the chosen backend.

    :param arrayimage: Input array. Binarized first if it has more than two
        unique values, unless preserve_labels is True.
    :param amount: Erosion distance in physical (scaled) units.
    :param xy_scale: Physical size of a pixel in xy (default 1).
    :param z_scale: Physical size of a voxel in z (default 1).
    :param mode: 0 -> fast kernel-based erode_3D; anything else -> the
        distance-transform erode_3D_dt (more accurate, slower).
    :param preserve_labels: Keep label values during erosion instead of
        binarizing. NOTE(review): only honored on the distance-transform
        path; mode 0 ignores it -- confirm intended.
    :returns: Eroded array, rescaled to 0/255 when the result is binary 0/1.
    """
    if not preserve_labels and len(np.unique(arrayimage)) > 2: #binarize
        arrayimage = binarize(arrayimage)
    erode_xy, erode_z = dilation_length_to_pixels(xy_scale, z_scale, amount, amount)

    if mode == 0:
        # Kernel-based erosion returns 0/1; scale up to the 0/255 convention.
        arrayimage = (erode_3D(arrayimage, erode_xy, erode_xy, erode_z)) * 255
    else:
        arrayimage = erode_3D_dt(arrayimage, amount, xy_scaling=xy_scale, z_scaling=z_scale, preserve_labels = preserve_labels)

    # Normalize a boolean-ish 0/1 result to 0/255 for downstream consumers.
    if np.max(arrayimage) == 1:
        arrayimage = arrayimage * 255

    return arrayimage
2119
2132
 
2133
def iden_set(idens):
    """Collapse identity labels into the set of atomic identities.

    Identities produced by ``merge_node_ids`` may be stored as the string
    repr of a list (e.g. ``"['A+', 'B-']"``). Such composite labels are
    expanded into their member identities; every other label is kept
    verbatim.

    :param idens: Iterable of identity labels (typically
        ``node_identities.values()``).
    :returns: Set of atomic identity labels.
    """
    atomic = set()
    for iden in set(idens):
        try:
            options = ast.literal_eval(iden)
            # TypeError here if the parsed literal is not iterable
            # (e.g. "5" parses to the int 5).
            for opt in options:
                atomic.add(opt)
        except (ValueError, SyntaxError, TypeError):
            # Not a parseable composite label -- keep the raw identity.
            atomic.add(iden)
    return atomic
2121
2146
 
2122
2147
 
2123
2148
 
@@ -3813,7 +3838,7 @@ class Network_3D:
3813
3838
 
3814
3839
  self._search_region = self._nodes
3815
3840
 
3816
- def calculate_edges(self, binary_edges, diledge = None, inners = True, hash_inner_edges = True, search = None, remove_edgetrunk = 0, GPU = True, fast_dil = False, skeletonized = False):
3841
+ def calculate_edges(self, binary_edges, diledge = None, inners = True, search = None, remove_edgetrunk = 0, GPU = True, fast_dil = False, skeletonized = False):
3817
3842
  """
3818
3843
  Method to calculate the edges that are used to directly connect nodes. May be done with or without the search region, however using search_region is recommended.
3819
3844
  The search_region property must be set to use the search region, otherwise the nodes property must be set. Sets the edges property
@@ -3822,7 +3847,6 @@ class Network_3D:
3822
3847
  so some amount of dilation is recommended if there are any, but not so much to create overconnectivity. This is a value that needs to be tuned by the user.
3823
3848
  :param inners: (Optional - Val = True; boolean). Will use inner edges if True, will not if False. Inner edges are parts of the edge mask that exist within search regions. If search regions overlap,
3824
3849
  any edges that exist within the overlap will only assert connectivity if 'inners' is True.
3825
- :param hash_inner_edges: (Optional - Val = True; boolean). If False, all search regions that contain an edge object connecting multiple nodes will be assigned as connected.
3826
3850
  If True, an extra processing step is used to sort the correct connectivity amongst these search_regions. Can only be computed when search_regions property is set.
3827
3851
  :param search: (Optional - Val = None; int). Amount for nodes to search for connections, assuming the search_regions are not being used. Assigning a value to this param will utilize the secondary algorithm and not the search_regions.
3828
3852
  :param remove_edgetrunk: (Optional - Val = 0; int). Amount of times to remove the 'Trunk' from the edges. A trunk in this case is the largest (by vol) edge object remaining after nodes have broken up the edges.
@@ -3875,11 +3899,7 @@ class Network_3D:
3875
3899
  labelled_edges, num_edge = label_objects(outer_edges)
3876
3900
 
3877
3901
  if inners:
3878
-
3879
- if search is None and hash_inner_edges is True:
3880
- inner_edges = hash_inners(self._search_region, binary_edges, GPU = GPU)
3881
- else:
3882
- inner_edges = establish_inner_edges(search_region, binary_edges)
3902
+ inner_edges = hash_inners(self._search_region, binary_edges, GPU = GPU)
3883
3903
 
3884
3904
  del binary_edges
3885
3905
 
@@ -4011,7 +4031,7 @@ class Network_3D:
4011
4031
  self._network_lists = network_analysis.read_excel_to_lists(df)
4012
4032
  self._network, net_weights = network_analysis.weighted_network(df)
4013
4033
 
4014
- def calculate_all(self, nodes, edges, xy_scale = 1, z_scale = 1, down_factor = None, search = None, diledge = None, inners = True, hash_inners = True, remove_trunk = 0, ignore_search_region = False, other_nodes = None, label_nodes = True, directory = None, GPU = True, fast_dil = True, skeletonize = False, GPU_downsample = None):
4034
+ def calculate_all(self, nodes, edges, xy_scale = 1, z_scale = 1, down_factor = None, search = None, diledge = None, inners = True, remove_trunk = 0, ignore_search_region = False, other_nodes = None, label_nodes = True, directory = None, GPU = True, fast_dil = True, skeletonize = False, GPU_downsample = None):
4015
4035
  """
4016
4036
  Method to calculate and save to mem all properties of a Network_3D object. In general, after initializing a Network_3D object, this method should be called on the node and edge masks that will be used to calculate the network.
4017
4037
  :param nodes: (Mandatory; String or ndarray). Filepath to segmented nodes mask or a numpy array containing the same.
@@ -4024,7 +4044,6 @@ class Network_3D:
4024
4044
  so some amount of dilation is recommended if there are any, but not so much to create overconnectivity. This is a value that needs to be tuned by the user.
4025
4045
  :param inners: (Optional - Val = True; boolean). Will use inner edges if True, will not if False. Inner edges are parts of the edge mask that exist within search regions. If search regions overlap,
4026
4046
  any edges that exist within the overlap will only assert connectivity if 'inners' is True.
4027
- :param hash_inners: (Optional - Val = True; boolean). If False, all search regions that contain an edge object connecting multiple nodes will be assigned as connected.
4028
4047
  If True, an extra processing step is used to sort the correct connectivity amongst these search_regions. Can only be computed when search_regions property is set.
4029
4048
  :param remove_trunk: (Optional - Val = 0; int). Amount of times to remove the 'Trunk' from the edges. A trunk in this case is the largest (by vol) edge object remaining after nodes have broken up the edges.
4030
4049
  Any 'Trunks' removed will be absent for connection calculations.
@@ -4086,7 +4105,7 @@ class Network_3D:
4086
4105
  except:
4087
4106
  pass
4088
4107
 
4089
- self.calculate_edges(edges, diledge = diledge, inners = inners, hash_inner_edges = hash_inners, search = search, remove_edgetrunk = remove_trunk, GPU = GPU, fast_dil = fast_dil, skeletonized = skeletonize) #Will have to be moved out if the second method becomes more directly implemented
4108
+ self.calculate_edges(edges, diledge = diledge, inners = inners, search = search, remove_edgetrunk = remove_trunk, GPU = GPU, fast_dil = fast_dil, skeletonized = skeletonize) #Will have to be moved out if the second method becomes more directly implemented
4090
4109
  else:
4091
4110
  self._edges, _ = label_objects(edges)
4092
4111
 
@@ -5070,13 +5089,16 @@ class Network_3D:
5070
5089
 
5071
5090
 
5072
5091
  for node in G.nodes():
5073
- nodeid = node_identities[node]
5074
- neighbors = list(G.neighbors(node))
5075
- for subnode in neighbors:
5076
- subnodeid = node_identities[subnode]
5077
- if subnodeid == root:
5078
- neighborhood_dict[nodeid] += 1
5079
- break
5092
+ try:
5093
+ nodeid = node_identities[node]
5094
+ neighbors = list(G.neighbors(node))
5095
+ for subnode in neighbors:
5096
+ subnodeid = node_identities[subnode]
5097
+ if subnodeid == root:
5098
+ neighborhood_dict[nodeid] += 1
5099
+ break
5100
+ except:
5101
+ pass
5080
5102
 
5081
5103
  title1 = f'Neighborhood Distribution of Nodes in Network from Nodes: {root}'
5082
5104
  title2 = f'Neighborhood Distribution of Nodes in Network from Nodes {root} as a proportion of total nodes of that ID'
@@ -5185,34 +5207,22 @@ class Network_3D:
5185
5207
  bounds = (min_coords, max_coords)
5186
5208
  dim_list = max_coords - min_coords
5187
5209
 
5188
-
5189
- if dim == 3:
5190
-
5191
- """
5192
- if self.xy_scale > self.z_scale: # xy will be 'expanded' more so its components will be arbitrarily further from the border than z ones
5193
- factor_xy = (self.z_scale/self.xy_scale) * factor # So 'factor' in the xy dim has to get smaller
5194
- factor_z = factor
5195
- elif self.z_scale > self.xy_scale: # Same idea
5196
- factor_z = (self.xy_scale/self.z_scale) * factor
5197
- factor_xy = factor
5198
- else:
5199
- factor_z = factor
5200
- factor_xy = factor
5201
- """
5202
-
5203
- for centroid in roots:
5204
-
5205
- if ((centroid[2] - min_coords[0]) > dim_list[0] * factor) and ((max_coords[0] - centroid[2]) > dim_list[0] * factor) and ((centroid[1] - min_coords[1]) > dim_list[1] * factor) and ((max_coords[1] - centroid[1]) > dim_list[1] * factor) and ((centroid[0] - min_coords[2]) > dim_list[2] * factor) and ((max_coords[2] - centroid[0]) > dim_list[2] * factor):
5210
+ for centroid in roots:
5211
+ # Assuming centroid is [z, y, x] based on your indexing
5212
+ z, y, x = centroid[0], centroid[1], centroid[2]
5213
+
5214
+ # Check x-dimension
5215
+ x_ok = (x - min_coords[0]) > dim_list[0] * factor and (max_coords[0] - x) > dim_list[0] * factor
5216
+ # Check y-dimension
5217
+ y_ok = (y - min_coords[1]) > dim_list[1] * factor and (max_coords[1] - y) > dim_list[1] * factor
5218
+
5219
+ if dim == 3: # 3D case
5220
+ # Check z-dimension
5221
+ z_ok = (z - min_coords[2]) > dim_list[2] * factor and (max_coords[2] - z) > dim_list[2] * factor
5222
+ if x_ok and y_ok and z_ok:
5206
5223
  new_list.append(centroid)
5207
-
5208
-
5209
- #if ((centroid[2] - min_coords[0]) > dim_list[0] * factor) and ((max_coords[0] - centroid[2]) > dim_list[0] * factor) and ((centroid[1] - min_coords[1]) > dim_list[1] * factor) and ((max_coords[1] - centroid[1]) > dim_list[1] * factor) and ((centroid[0] - min_coords[2]) > dim_list[2] * factor) and ((max_coords[2] - centroid[0]) > dim_list[2] * factor):
5210
- #new_list.append(centroid)
5211
- #print(f"dim_list: {dim_list}, centroid: {centroid}, min_coords: {min_coords}, max_coords: {max_coords}")
5212
- else:
5213
- for centroid in roots:
5214
-
5215
- if ((centroid[2] - min_coords[0]) > dim_list[0] * factor) and ((max_coords[0] - centroid[2]) > dim_list[0] * factor) and ((centroid[1] - min_coords[1]) > dim_list[1] * factor) and ((max_coords[1] - centroid[1]) > dim_list[1] * factor):
5224
+ else: # 2D case
5225
+ if x_ok and y_ok:
5216
5226
  new_list.append(centroid)
5217
5227
 
5218
5228
  else:
@@ -5265,8 +5275,6 @@ class Network_3D:
5265
5275
  print(f"Utilizing {len(roots)} root points. Note that low n values are unstable.")
5266
5276
  is_subset = True
5267
5277
 
5268
-
5269
-
5270
5278
  roots = proximity.convert_centroids_to_array(roots, xy_scale = self.xy_scale, z_scale = self.z_scale)
5271
5279
 
5272
5280
  n_subset = len(targs)
@@ -5451,7 +5459,7 @@ class Network_3D:
5451
5459
 
5452
5460
  community_dict = invert_dict(self.communities)
5453
5461
  summation = 0
5454
- id_set = set(self.node_identities.values())
5462
+ id_set = iden_set(self.node_identities.values())
5455
5463
  output = {sort: 0 for sort in id_set}
5456
5464
  template = copy.deepcopy(output)
5457
5465
 
@@ -5463,7 +5471,12 @@ class Network_3D:
5463
5471
 
5464
5472
  # Count identities in this community
5465
5473
  for node in nodes:
5466
- counter[self.node_identities[node]] += 1
5474
+ try:
5475
+ idens = ast.literal_eval(self.node_identities[node])
5476
+ for iden in idens:
5477
+ counter[iden] += 1
5478
+ except:
5479
+ counter[self.node_identities[node]] += 1
5467
5480
 
5468
5481
  # Convert to proportions within this community and weight by size
5469
5482
  for sort in counter:
@@ -5492,11 +5505,60 @@ class Network_3D:
5492
5505
  neighborhoods.visualize_cluster_composition_umap(self.node_centroids, None, id_dictionary = self.node_identities, graph_label = "Node ID", title = 'UMAP Visualization of Node Centroids')
5493
5506
 
5494
5507
 
5508
+
5509
    def identity_umap(self):
        """Visualize node identities as a UMAP of one-hot identity vectors.

        Each node becomes a binary vector of length ``len(iden_set(...))``
        with a 1 at the index of every atomic identity it carries; composite
        identities stored as list-reprs are expanded. Multi-identity nodes
        are colored by one of their identities chosen at random -- presumably
        just for display grouping; verify against the plotting helper.
        Errors are caught and printed rather than raised.
        """
        try:
            # Stable index per atomic identity (composites flattened).
            id_set = iden_set(self.node_identities.values())

            template = np.zeros(len(id_set))

            id_dict = {}
            for i, iden in enumerate(id_set):
                id_dict[iden] = i

            umap_dict = {}

            for node in self.node_identities.keys():
                # deepcopy so each node gets its own vector, not a shared one.
                umap_dict[node] = copy.deepcopy(template)
                try:
                    # Composite identity: set a 1 for each member identity.
                    idens = ast.literal_eval(self.node_identities[node])
                    for iden in idens:
                        index = id_dict[iden]
                        ref = umap_dict[node]
                        ref[index] = 1
                        umap_dict[node] = ref
                except:
                    # Fallback for plain (non-literal) identities. NOTE(review):
                    # a KeyError mid-loop above also lands here -- confirm that
                    # partial one-hot rows are acceptable in that case.
                    index = id_dict[self.node_identities[node]]
                    ref = umap_dict[node]
                    ref[index] = 1
                    umap_dict[node] = ref

            neighbor_classes = {}
            import random

            for node, iden in self.node_identities.items():
                try:
                    # Composite identity: pick one member at random as the
                    # node's display class.
                    idens = ast.literal_eval(iden)
                    neighbor_classes[node] = random.choice(idens)
                except:
                    neighbor_classes[node] = iden


            from . import neighborhoods

            neighborhoods.visualize_cluster_composition_umap(umap_dict, None, id_dictionary = neighbor_classes, graph_label = "Node ID", title = 'UMAP Visualization of Node Identities', draw_lines = True)

        except Exception as e:
            print(f"Error: {e}")
5555
+
5556
+
5495
5557
  def community_id_info_per_com(self, umap = False, label = 0, limit = 0, proportional = False, neighbors = None):
5496
5558
 
5497
5559
  community_dict = invert_dict(self.communities)
5498
5560
  summation = 0
5499
- id_set = set(self.node_identities.values())
5561
+ id_set = iden_set(self.node_identities.values())
5500
5562
  id_dict = {}
5501
5563
  for i, iden in enumerate(id_set):
5502
5564
  id_dict[iden] = i
@@ -5515,7 +5577,15 @@ class Network_3D:
5515
5577
 
5516
5578
  # Count identities in this community
5517
5579
  for node in nodes:
5518
- counter[id_dict[self.node_identities[node]]] += 1 # Keep them as arrays
5580
+ try:
5581
+ idens = ast.literal_eval(self.node_identities[node])
5582
+ for iden in idens:
5583
+ counter[id_dict[iden]] += 1
5584
+ except:
5585
+ try:
5586
+ counter[id_dict[self.node_identities[node]]] += 1 # Keep them as arrays
5587
+ except:
5588
+ pass
5519
5589
 
5520
5590
  for i in range(len(counter)): # Translate them into proportions out of 1
5521
5591
 
@@ -5527,12 +5597,11 @@ class Network_3D:
5527
5597
  umap_dict[community] = counter
5528
5598
 
5529
5599
  else:
5530
- idens = invert_dict(self.node_identities)
5600
+ idens = invert_dict_special(self.node_identities)
5531
5601
  iden_count = {}
5532
5602
  template = {}
5533
5603
  node_count = len(list(self.communities.keys()))
5534
5604
 
5535
-
5536
5605
  for iden in id_set:
5537
5606
  template[iden] = 0
5538
5607
 
@@ -5548,7 +5617,15 @@ class Network_3D:
5548
5617
  counter = np.zeros(len(id_set))
5549
5618
 
5550
5619
  for node in nodes:
5551
- iden_tracker[self.node_identities[node]] += 1
5620
+ try:
5621
+ idents = ast.literal_eval(self.node_identities[node])
5622
+ for iden in idents:
5623
+ iden_tracker[iden] += 1
5624
+ except:
5625
+ try:
5626
+ iden_tracker[self.node_identities[node]] += 1
5627
+ except:
5628
+ pass
5552
5629
 
5553
5630
  i = 0
5554
5631
 
@@ -5819,7 +5896,7 @@ class Network_3D:
5819
5896
  return heat_dict, overlay
5820
5897
 
5821
5898
 
5822
- def merge_node_ids(self, path, data):
5899
+ def merge_node_ids(self, path, data, include = True):
5823
5900
 
5824
5901
  if self.node_identities is None: # Prepare modular dict
5825
5902
 
@@ -5828,9 +5905,17 @@ class Network_3D:
5828
5905
  nodes = list(np.unique(data))
5829
5906
  if 0 in nodes:
5830
5907
  del nodes[0]
5831
-
5832
5908
  for node in nodes:
5833
- self.node_identities[node] = ''
5909
+
5910
+ self.node_identities[node] = [] # Assign to lists at first
5911
+ else:
5912
+ for node, iden in self.node_identities.items():
5913
+ try:
5914
+ self.node_identities[node] = ast.literal_eval(iden)
5915
+ except:
5916
+ self.node_identities[node] = [iden]
5917
+
5918
+
5834
5919
 
5835
5920
  img_list = directory_info(path)
5836
5921
 
@@ -5840,9 +5925,12 @@ class Network_3D:
5840
5925
  if len(np.unique(mask)) != 2:
5841
5926
 
5842
5927
  mask = otsu_binarize(mask)
5928
+ else:
5929
+ mask = mask != 0
5843
5930
 
5844
5931
  nodes = data * mask
5845
- nodes = list(np.unique(nodes))
5932
+ nodes = np.unique(nodes)
5933
+ nodes = nodes.tolist()
5846
5934
  if 0 in nodes:
5847
5935
  del nodes[0]
5848
5936
 
@@ -5853,21 +5941,43 @@ class Network_3D:
5853
5941
  else:
5854
5942
  base_name = img
5855
5943
 
5944
+ assigned = {}
5945
+
5946
+
5856
5947
  for node in self.node_identities.keys():
5857
5948
 
5858
5949
  try:
5859
5950
 
5860
- if node in nodes:
5951
+ if int(node) in nodes:
5861
5952
 
5862
- self.node_identities[node] += f" {base_name}+"
5953
+ self.node_identities[node].append(f'{base_name}+')
5863
5954
 
5864
- else:
5955
+ elif include:
5865
5956
 
5866
- self.node_identities[node] += f" {base_name}-"
5957
+ self.node_identities[node].append(f'{base_name}-')
5867
5958
 
5868
5959
  except:
5869
5960
  pass
5870
5961
 
5962
+ modify_dict = copy.deepcopy(self.node_identities)
5963
+
5964
+ for node, iden in self.node_identities.items():
5965
+
5966
+ try:
5967
+
5968
+ if len(iden) == 1:
5969
+
5970
+ modify_dict[node] = str(iden[0]) # Singleton lists become bare strings
5971
+ elif len(iden) == 0:
5972
+ del modify_dict[node]
5973
+ else:
5974
+ modify_dict[node] = str(iden) # We hold multi element lists as strings for compatibility
5975
+
5976
+ except:
5977
+ pass
5978
+
5979
+ self.node_identities = modify_dict
5980
+
5871
5981
 
5872
5982
  def nearest_neighbors_avg(self, root, targ, xy_scale = 1, z_scale = 1, num = 1, heatmap = False, threed = True, numpy = False, quant = False, centroids = True):
5873
5983