nettracer3d 0.9.5__py3-none-any.whl → 0.9.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -347,8 +347,7 @@ def visualize_cluster_composition_umap(cluster_data: Dict[int, np.ndarray],
  id_dictionary: Optional[Dict[int, str]] = None,
  graph_label = "Community ID",
  title = 'UMAP Visualization of Community Compositions',
- neighborhoods: Optional[Dict[int, int]] = None,
- draw_lines: bool = False):
+ neighborhoods: Optional[Dict[int, int]] = None):
  """
  Convert cluster composition data to UMAP visualization.

@@ -371,8 +370,6 @@ def visualize_cluster_composition_umap(cluster_data: Dict[int, np.ndarray],
  neighborhoods : dict, optional
  Dictionary mapping node IDs to neighborhood IDs {node_id: neighborhood_id}.
  If provided, points will be colored by neighborhood using community coloration methods.
- draw_lines : bool
- Whether to draw lines between nodes that share identities (default: False)

  Returns:
  --------
@@ -456,111 +453,15 @@ def visualize_cluster_composition_umap(cluster_data: Dict[int, np.ndarray],
  plt.figure(figsize=(12, 8))

  if n_components == 2:
- # Draw scatter with different markers for multi-identity nodes if draw_lines is enabled
- if draw_lines:
- # Separate multi-identity and singleton nodes for different markers
- singleton_indices = []
- multi_indices = []
- singleton_colors = []
- multi_colors = []
-
- for i, cluster_id in enumerate(cluster_ids):
- vec = cluster_data[cluster_id]
- if np.sum(vec) > 1: # Multi-identity
- multi_indices.append(i)
- multi_colors.append(point_colors[i] if isinstance(point_colors, list) else point_colors)
- else: # Singleton
- singleton_indices.append(i)
- singleton_colors.append(point_colors[i] if isinstance(point_colors, list) else point_colors)
-
- # Draw singleton nodes as circles
- if singleton_indices:
- if use_neighborhood_coloring or use_identity_coloring:
- scatter1 = plt.scatter(embedding[singleton_indices, 0], embedding[singleton_indices, 1],
- c=singleton_colors, s=100, alpha=0.7, marker='o')
- else:
- scatter1 = plt.scatter(embedding[singleton_indices, 0], embedding[singleton_indices, 1],
- c=[point_colors[i] for i in singleton_indices], cmap='viridis', s=100, alpha=0.7, marker='o')
-
- # Draw multi-identity nodes as squares
- if multi_indices:
- if use_neighborhood_coloring or use_identity_coloring:
- scatter2 = plt.scatter(embedding[multi_indices, 0], embedding[multi_indices, 1],
- c=multi_colors, s=100, alpha=0.7, marker='s')
- else:
- scatter2 = plt.scatter(embedding[multi_indices, 0], embedding[multi_indices, 1],
- c=[point_colors[i] for i in multi_indices], cmap='viridis', s=100, alpha=0.7, marker='s')
- scatter = scatter2 # For colorbar reference
- else:
- scatter = scatter1 if singleton_indices else None
+ if use_neighborhood_coloring:
+ scatter = plt.scatter(embedding[:, 0], embedding[:, 1],
+ c=point_colors, s=100, alpha=0.7)
+ elif use_identity_coloring:
+ scatter = plt.scatter(embedding[:, 0], embedding[:, 1],
+ c=point_colors, s=100, alpha=0.7)
  else:
- # Original behavior when draw_lines is False
- if use_neighborhood_coloring:
- scatter = plt.scatter(embedding[:, 0], embedding[:, 1],
- c=point_colors, s=100, alpha=0.7)
- elif use_identity_coloring:
- scatter = plt.scatter(embedding[:, 0], embedding[:, 1],
- c=point_colors, s=100, alpha=0.7)
- else:
- scatter = plt.scatter(embedding[:, 0], embedding[:, 1],
- c=point_colors, cmap='viridis', s=100, alpha=0.7)
-
- # Draw lines between nodes with shared identities (only if draw_lines=True)
- if draw_lines:
- # First pass: identify unique multi-identity configurations and their representatives
- multi_config_map = {} # Maps tuple(config) -> {'count': int, 'representative_idx': int}
-
- for i, cluster_id in enumerate(cluster_ids):
- vec = cluster_data[cluster_id]
- if np.sum(vec) > 1: # Multi-identity node
- config = tuple(vec) # Convert to hashable tuple
- if config not in multi_config_map:
- multi_config_map[config] = {'count': 1, 'representative_idx': i}
- else:
- multi_config_map[config]['count'] += 1
-
- # Second pass: draw lines for each unique configuration
- for config, info in multi_config_map.items():
- i = info['representative_idx']
- count = info['count']
- vec1 = np.array(config)
-
- # For each identity this configuration has, find the closest representative
- identity_indices = np.where(vec1 == 1)[0]
-
- for identity_idx in identity_indices:
- best_target = None
- best_distance = float('inf')
- backup_target = None
- backup_distance = float('inf')
-
- # Find closest node with this specific identity
- for j, cluster_id2 in enumerate(cluster_ids):
- if i != j: # Don't connect to self
- vec2 = cluster_data[cluster_id2]
- if vec2[identity_idx] == 1: # Shares this specific identity
- distance = np.linalg.norm(embedding[i] - embedding[j])
-
- # Prefer singleton nodes
- if np.sum(vec2) == 1: # Singleton
- if distance < best_distance:
- best_distance = distance
- best_target = j
- else: # Multi-identity node (backup)
- if distance < backup_distance:
- backup_distance = distance
- backup_target = j
-
- # Draw line to best target (prefer singleton, fallback to multi)
- target = best_target if best_target is not None else backup_target
- if target is not None:
- # Calculate relative line weight with reasonable cap
- max_count = max(info['count'] for info in multi_config_map.values())
- relative_weight = count / max_count # Normalize to 0-1
- line_weight = 0.3 + relative_weight * 1.2 # Scale to 0.3-1.5 range
- plt.plot([embedding[i, 0], embedding[target, 0]],
- [embedding[i, 1], embedding[target, 1]],
- alpha=0.3, color='gray', linewidth=line_weight)
+ scatter = plt.scatter(embedding[:, 0], embedding[:, 1],
+ c=point_colors, cmap='viridis', s=100, alpha=0.7)

  if label:
  # Add cluster ID labels
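In 0.9.6 the 2-D path reduces to one scatter call per coloring mode. A standalone sketch of that reduced logic, with synthetic stand-ins for the function's embedding, point_colors, and coloring flags (the neighborhood and identity branches pass identical scatter arguments, so the sketch folds them into one condition):

# Minimal sketch of the simplified 2-D branch: one scatter per coloring mode.
# 'embedding', 'point_colors', and the flags below are synthetic stand-ins.
import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
embedding = rng.normal(size=(30, 2))          # stand-in for the UMAP embedding
use_neighborhood_coloring = False
use_identity_coloring = False

if use_neighborhood_coloring or use_identity_coloring:
    point_colors = ['tab:blue'] * 30          # explicit per-point colors
    scatter = plt.scatter(embedding[:, 0], embedding[:, 1],
                          c=point_colors, s=100, alpha=0.7)
else:
    point_colors = rng.random(30)             # scalar values mapped through viridis
    scatter = plt.scatter(embedding[:, 0], embedding[:, 1],
                          c=point_colors, cmap='viridis', s=100, alpha=0.7)
plt.show()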
@@ -615,112 +516,15 @@ def visualize_cluster_composition_umap(cluster_data: Dict[int, np.ndarray],
  fig = plt.figure(figsize=(14, 10))
  ax = fig.add_subplot(111, projection='3d')

- # Draw scatter with different markers for multi-identity nodes if draw_lines is enabled
- if draw_lines:
- # Separate multi-identity and singleton nodes for different markers
- singleton_indices = []
- multi_indices = []
- singleton_colors = []
- multi_colors = []
-
- for i, cluster_id in enumerate(cluster_ids):
- vec = cluster_data[cluster_id]
- if np.sum(vec) > 1: # Multi-identity
- multi_indices.append(i)
- multi_colors.append(point_colors[i] if isinstance(point_colors, list) else point_colors)
- else: # Singleton
- singleton_indices.append(i)
- singleton_colors.append(point_colors[i] if isinstance(point_colors, list) else point_colors)
-
- # Draw singleton nodes as circles
- if singleton_indices:
- if use_neighborhood_coloring or use_identity_coloring:
- scatter1 = ax.scatter(embedding[singleton_indices, 0], embedding[singleton_indices, 1], embedding[singleton_indices, 2],
- c=singleton_colors, s=100, alpha=0.7, marker='o')
- else:
- scatter1 = ax.scatter(embedding[singleton_indices, 0], embedding[singleton_indices, 1], embedding[singleton_indices, 2],
- c=[point_colors[i] for i in singleton_indices], cmap='viridis', s=100, alpha=0.7, marker='o')
-
- # Draw multi-identity nodes as squares
- if multi_indices:
- if use_neighborhood_coloring or use_identity_coloring:
- scatter2 = ax.scatter(embedding[multi_indices, 0], embedding[multi_indices, 1], embedding[multi_indices, 2],
- c=multi_colors, s=100, alpha=0.7, marker='s')
- else:
- scatter2 = ax.scatter(embedding[multi_indices, 0], embedding[multi_indices, 1], embedding[multi_indices, 2],
- c=[point_colors[i] for i in multi_indices], cmap='viridis', s=100, alpha=0.7, marker='s')
- scatter = scatter2 # For colorbar reference
- else:
- scatter = scatter1 if singleton_indices else None
+ if use_neighborhood_coloring:
+ scatter = ax.scatter(embedding[:, 0], embedding[:, 1], embedding[:, 2],
+ c=point_colors, s=100, alpha=0.7)
+ elif use_identity_coloring:
+ scatter = ax.scatter(embedding[:, 0], embedding[:, 1], embedding[:, 2],
+ c=point_colors, s=100, alpha=0.7)
  else:
- # Original behavior when draw_lines is False
- if use_neighborhood_coloring:
- scatter = ax.scatter(embedding[:, 0], embedding[:, 1], embedding[:, 2],
- c=point_colors, s=100, alpha=0.7)
- elif use_identity_coloring:
- scatter = ax.scatter(embedding[:, 0], embedding[:, 1], embedding[:, 2],
- c=point_colors, s=100, alpha=0.7)
- else:
- scatter = ax.scatter(embedding[:, 0], embedding[:, 1], embedding[:, 2],
- c=point_colors, cmap='viridis', s=100, alpha=0.7)
-
- # Draw lines between nodes with shared identities (only if draw_lines=True)
- if draw_lines:
- # First pass: identify unique multi-identity configurations and their representatives
- multi_config_map = {} # Maps tuple(config) -> {'count': int, 'representative_idx': int}
-
- for i, cluster_id in enumerate(cluster_ids):
- vec = cluster_data[cluster_id]
- if np.sum(vec) > 1: # Multi-identity node
- config = tuple(vec) # Convert to hashable tuple
- if config not in multi_config_map:
- multi_config_map[config] = {'count': 1, 'representative_idx': i}
- else:
- multi_config_map[config]['count'] += 1
-
- # Second pass: draw lines for each unique configuration
- for config, info in multi_config_map.items():
- i = info['representative_idx']
- count = info['count']
- vec1 = np.array(config)
-
- # For each identity this configuration has, find the closest representative
- identity_indices = np.where(vec1 == 1)[0]
-
- for identity_idx in identity_indices:
- best_target = None
- best_distance = float('inf')
- backup_target = None
- backup_distance = float('inf')
-
- # Find closest node with this specific identity
- for j, cluster_id2 in enumerate(cluster_ids):
- if i != j: # Don't connect to self
- vec2 = cluster_data[cluster_id2]
- if vec2[identity_idx] == 1: # Shares this specific identity
- distance = np.linalg.norm(embedding[i] - embedding[j])
-
- # Prefer singleton nodes
- if np.sum(vec2) == 1: # Singleton
- if distance < best_distance:
- best_distance = distance
- best_target = j
- else: # Multi-identity node (backup)
- if distance < backup_distance:
- backup_distance = distance
- backup_target = j
-
- # Draw line to best target (prefer singleton, fallback to multi)
- target = best_target if best_target is not None else backup_target
- if target is not None:
- # Calculate relative line weight with reasonable cap
- max_count = max(info['count'] for info in multi_config_map.values())
- relative_weight = count / max_count # Normalize to 0-1
- line_weight = 0.3 + relative_weight * 1.2 # Scale to 0.3-1.5 range
- ax.plot([embedding[i, 0], embedding[target, 0]],
- [embedding[i, 1], embedding[target, 1]],
- [embedding[i, 2], embedding[target, 2]],
- alpha=0.3, color='gray', linewidth=line_weight)
+ scatter = ax.scatter(embedding[:, 0], embedding[:, 1], embedding[:, 2],
+ c=point_colors, cmap='viridis', s=100, alpha=0.7)

  if label:
  # Add cluster ID labels
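With draw_lines removed from the signature, callers (such as identity_umap later in this diff) rely on the default scatter-only rendering. A hedged calling sketch against the 0.9.6 signature; the composition vectors and identity labels are invented, and the second positional argument is left as None because that is how nettracer.py's own calls pass it:

# Hedged usage sketch for the 0.9.6 signature (no draw_lines keyword).
# The composition vectors and labels below are made up for illustration.
import numpy as np
from nettracer3d import neighborhoods

rng = np.random.default_rng(1)
cluster_data = {i: rng.integers(0, 2, size=4).astype(float) for i in range(1, 31)}
id_dictionary = {i: f"class_{rng.integers(0, 3)}" for i in cluster_data}

neighborhoods.visualize_cluster_composition_umap(
    cluster_data,
    None,                              # second positional argument, passed as None in nettracer.py
    id_dictionary=id_dictionary,
    graph_label="Node ID",
    title="UMAP Visualization of Community Compositions",
)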
nettracer3d/nettracer.py CHANGED
@@ -1735,54 +1735,6 @@ def combine_edges(edge_labels_1, edge_labels_2):

  return np.where(mask, offset_labels, edge_labels_1)

- def combine_nodes(root_nodes, other_nodes, other_ID, identity_dict, root_ID = None):
-
- """Internal method to merge two labelled node arrays into one"""
-
- print("Combining node arrays")
-
- mask = (root_nodes == 0) & (other_nodes > 0)
- if np.any(mask):
- max_val = np.max(root_nodes)
- other_nodes[:] = np.where(mask, other_nodes + max_val, 0)
-
- if root_ID is not None:
- rootIDs = list(np.unique(root_nodes)) #Sets up adding these vals to the identitiy dictionary. Gets skipped if this has already been done.
-
- if rootIDs[0] == 0: #np unique can include 0 which we don't want.
- del rootIDs[0]
-
- otherIDs = list(np.unique(other_nodes)) #Sets up adding other vals to the identity dictionary.
-
- if otherIDs[0] == 0:
- del otherIDs[0]
-
- if root_ID is not None: #Adds the root vals to the dictionary if it hasn't already
-
- if other_ID.endswith('.tiff'):
- other_ID = other_ID[:-5]
- elif other_ID.endswith('.tif'):
- other_ID = other_ID[:-4]
-
- for item in rootIDs:
- identity_dict[item] = root_ID
-
- for item in otherIDs: #Always adds the other vals to the dictionary
- try:
- other_ID = os.path.basename(other_ID)
- except:
- pass
- if other_ID.endswith('.tiff'):
- other_ID = other_ID[:-5]
- elif other_ID.endswith('.tif'):
- other_ID = other_ID[:-4]
-
- identity_dict[item] = other_ID
-
- nodes = root_nodes + other_nodes #Combine the outer edges with the inner edges modified via the above steps
-
- return nodes, identity_dict
-
  def directory_info(directory = None):
  """Internal method to get the files in a directory, optionally the current directory if nothing passed"""

@@ -3925,7 +3877,58 @@ class Network_3D:
  """
  self._nodes, num_nodes = label_objects(nodes, structure_3d)

- def merge_nodes(self, addn_nodes_name, label_nodes = True, root_id = "Root_Nodes"):
+ def combine_nodes(self, root_nodes, other_nodes, other_ID, identity_dict, root_ID = None, centroids = False):
+
+ """Internal method to merge two labelled node arrays into one"""
+
+ print("Combining node arrays")
+
+ mask = (root_nodes == 0) & (other_nodes > 0)
+ if np.any(mask):
+ max_val = np.max(root_nodes)
+ other_nodes[:] = np.where(mask, other_nodes + max_val, 0)
+ if centroids:
+ new_dict = network_analysis._find_centroids(other_nodes)
+ self.node_centroids.update(new_dict)
+
+ if root_ID is not None:
+ rootIDs = list(np.unique(root_nodes)) #Sets up adding these vals to the identitiy dictionary. Gets skipped if this has already been done.
+
+ if rootIDs[0] == 0: #np unique can include 0 which we don't want.
+ del rootIDs[0]
+
+ otherIDs = list(np.unique(other_nodes)) #Sets up adding other vals to the identity dictionary.
+
+ if otherIDs[0] == 0:
+ del otherIDs[0]
+
+ if root_ID is not None: #Adds the root vals to the dictionary if it hasn't already
+
+ if other_ID.endswith('.tiff'):
+ other_ID = other_ID[:-5]
+ elif other_ID.endswith('.tif'):
+ other_ID = other_ID[:-4]
+
+ for item in rootIDs:
+ identity_dict[item] = root_ID
+
+ for item in otherIDs: #Always adds the other vals to the dictionary
+ try:
+ other_ID = os.path.basename(other_ID)
+ except:
+ pass
+ if other_ID.endswith('.tiff'):
+ other_ID = other_ID[:-5]
+ elif other_ID.endswith('.tif'):
+ other_ID = other_ID[:-4]
+
+ identity_dict[item] = other_ID
+
+ nodes = root_nodes + other_nodes #Combine the outer edges with the inner edges modified via the above steps
+
+ return nodes, identity_dict
+
+ def merge_nodes(self, addn_nodes_name, label_nodes = True, root_id = "Root_Nodes", centroids = False):
  """
  Merges the self._nodes attribute with alternate labelled node images. The alternate nodes can be inputted as a string for a filepath to a tif,
  or as a directory address containing only tif images, which will merge the _nodes attribute with all tifs in the folder. The _node_identities attribute
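combine_nodes is now an instance method so that, with the new centroids flag, it can update self.node_centroids via network_analysis._find_centroids while merging. A hedged usage sketch of the reworked merge_nodes entry point; the construction of the Network_3D object, how its root node array gets populated, and the file name are assumptions not shown in this diff:

# Hedged sketch of calling merge_nodes with the new centroids flag (0.9.6).
# Assumes 'net' is a Network_3D whose root node array has already been set up;
# "additional_nodes.tif" is a placeholder path.
from nettracer3d.nettracer import Network_3D

net = Network_3D()
# ... load or compute net's root node array here ...

# Merge one additional node image (or a directory of tifs) and, new in 0.9.6,
# compute centroids for the root and merged nodes during the same pass:
net.merge_nodes("additional_nodes.tif", label_nodes=True,
                root_id="Root_Nodes", centroids=True)

# With centroids=True, node_centroids is seeded from the root nodes and
# updated for each merged image via network_analysis._find_centroids.
print(len(net.node_centroids))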
@@ -3945,16 +3948,20 @@ class Network_3D:

  identity_dict = {} #A dictionary to deliniate the node identities

+ if centroids:
+ self.node_centroids = network_analysis._find_centroids(self._nodes)
+
+
  try: #Try presumes the input is a tif
  addn_nodes = tifffile.imread(addn_nodes_name) #If not this will fail and activate the except block

  if label_nodes is True:
  addn_nodes, num_nodes2 = label_objects(addn_nodes) # Label the node objects. Note this presumes no overlap between node masks.
- node_labels, identity_dict = combine_nodes(self._nodes, addn_nodes, addn_nodes_name, identity_dict, nodes_name) #This method stacks labelled arrays
+ node_labels, identity_dict = self.combine_nodes(self._nodes, addn_nodes, addn_nodes_name, identity_dict, nodes_name, centroids = centroids) #This method stacks labelled arrays
  num_nodes = np.max(node_labels)

  else: #If nodes already labelled
- node_labels, identity_dict = combine_nodes(self._nodes, addn_nodes, addn_nodes_name, identity_dict, nodes_name)
+ node_labels, identity_dict = self.combine_nodes(self._nodes, addn_nodes, addn_nodes_name, identity_dict, nodes_name, centroids = centroids)
  num_nodes = int(np.max(node_labels))

  except: #Exception presumes the input is a directory containing multiple tifs, to allow multi-node stackage.
@@ -3972,16 +3979,15 @@ class Network_3D:
  if label_nodes is True:
  addn_nodes, num_nodes2 = label_objects(addn_nodes) # Label the node objects. Note this presumes no overlap between node masks.
  if i == 0:
- node_labels, identity_dict = combine_nodes(self._nodes, addn_nodes, addn_nodes_ID, identity_dict, nodes_name)
-
+ node_labels, identity_dict = self.combine_nodes(self._nodes, addn_nodes, addn_nodes_ID, identity_dict, nodes_name, centroids = centroids)
  else:
- node_labels, identity_dict = combine_nodes(node_labels, addn_nodes, addn_nodes_ID, identity_dict)
+ node_labels, identity_dict = self.combine_nodes(node_labels, addn_nodes, addn_nodes_ID, identity_dict, centroids = centroids)

  else:
  if i == 0:
- node_labels, identity_dict = combine_nodes(self._nodes, addn_nodes, addn_nodes_ID, identity_dict, nodes_name)
+ node_labels, identity_dict = self.combine_nodes(self._nodes, addn_nodes, addn_nodes_ID, identity_dict, nodes_name, centroids = centroids)
  else:
- node_labels, identity_dict = combine_nodes(node_labels, addn_nodes, addn_nodes_ID, identity_dict)
+ node_labels, identity_dict = self.combine_nodes(node_labels, addn_nodes, addn_nodes_ID, identity_dict, centroids = centroids)
  except Exception as e:
  print("Could not open additional nodes, verify they are being inputted correctly...")

@@ -5505,55 +5511,42 @@ class Network_3D:
  neighborhoods.visualize_cluster_composition_umap(self.node_centroids, None, id_dictionary = self.node_identities, graph_label = "Node ID", title = 'UMAP Visualization of Node Centroids')


-
- def identity_umap(self):
+ def identity_umap(self, data):

  try:

- id_set = iden_set(self.node_identities.values())
-
- template = np.zeros(len(id_set))
+ neighbor_classes = {}
+ import random

- id_dict = {}
- for i, iden in enumerate(id_set):
- id_dict[iden] = i
+ umap_dict = copy.deepcopy(data)

- umap_dict = {}
+ for item in data.keys():
+ if item in self.node_identities:
+ try:
+ parse = ast.literal_eval(self.node_identities[item])
+ neighbor_classes[item] = random.choice(parse)
+ except:
+ neighbor_classes[item] = self.node_identities[item]

- for node in self.node_identities.keys():
- umap_dict[node] = copy.deepcopy(template)
- try:
- idens = ast.literal_eval(self.node_identities[node])
- for iden in idens:
- index = id_dict[iden]
- ref = umap_dict[node]
- ref[index] = 1
- umap_dict[node] = ref
- except:
- index = id_dict[self.node_identities[node]]
- ref = umap_dict[node]
- ref[index] = 1
- umap_dict[node] = ref
+ else:
+ del umap_dict[item]

- neighbor_classes = {}
- import random
+ from scipy.stats import zscore

- for node, iden in self.node_identities.items():
- try:
- idens = ast.literal_eval(iden)
- neighbor_classes[node] = random.choice(idens)
- except:
- neighbor_classes[node] = iden
+ # Z-score normalize each marker (column)
+ for key in umap_dict:
+ umap_dict[key] = zscore(umap_dict[key])


  from . import neighborhoods

- neighborhoods.visualize_cluster_composition_umap(umap_dict, None, id_dictionary = neighbor_classes, graph_label = "Node ID", title = 'UMAP Visualization of Node Identities', draw_lines = True)
+ neighborhoods.visualize_cluster_composition_umap(umap_dict, None, id_dictionary = neighbor_classes, graph_label = "Node ID", title = 'UMAP Visualization of Node Identities by Z-Score')

  except Exception as e:
+ import traceback
+ print(traceback.format_exc())
  print(f"Error: {e}")

-
  def community_id_info_per_com(self, umap = False, label = 0, limit = 0, proportional = False, neighbors = None):

  community_dict = invert_dict(self.communities)
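identity_umap now takes a per-node data dictionary, drops nodes that have no entry in node_identities, z-scores each node's vector with scipy.stats.zscore, and picks one identity per node (randomly, for multi-identity strings) purely for coloring. A standalone sketch of that preprocessing on synthetic data, outside the class:

# Standalone sketch of the new identity_umap preprocessing (0.9.6 behavior).
# The data and identity strings below are synthetic.
import ast
import copy
import random
import numpy as np
from scipy.stats import zscore

data = {1: np.array([5.0, 1.0, 0.2]),
        2: np.array([4.0, 2.0, 0.1]),
        3: np.array([0.5, 9.0, 3.0])}
node_identities = {1: "marker_A", 2: "['marker_A', 'marker_B']"}  # node 3 has no identity

umap_dict = copy.deepcopy(data)
neighbor_classes = {}
for item in data:
    if item in node_identities:
        try:
            # multi-identity strings parse to a list; pick one label for coloring
            neighbor_classes[item] = random.choice(ast.literal_eval(node_identities[item]))
        except (ValueError, SyntaxError):
            neighbor_classes[item] = node_identities[item]
    else:
        del umap_dict[item]          # nodes without an identity are dropped

for key in umap_dict:
    umap_dict[key] = zscore(umap_dict[key])   # per-node vector, as in the diff

print(umap_dict, neighbor_classes)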
@@ -5895,6 +5888,23 @@ class Network_3D:
  overlay = neighborhoods.create_community_heatmap(heat_dict, self.communities, self.node_centroids, shape = shape, is_3d=is3d, labeled_array = self.nodes)
  return heat_dict, overlay

+ def get_merge_node_dictionaries(self, path, data):
+
+ img_list = directory_info(path)
+ id_dicts = []
+ num_nodes = np.max(data)
+
+ for i, img in enumerate(img_list):
+ if img.endswith('.tiff') or img.endswith('.tif'):
+ print(f"Processing image {img}")
+ mask = tifffile.imread(f'{path}/{img}')
+ if len(mask.shape) == 2:
+ mask = np.expand_dims(mask, axis = 0)
+
+ id_dict = proximity.create_node_dictionary_id(data, mask, num_nodes)
+ id_dicts.append(id_dict)
+
+ return id_dicts

  def merge_node_ids(self, path, data, include = True):

@@ -5920,46 +5930,49 @@ class Network_3D:
  img_list = directory_info(path)

  for i, img in enumerate(img_list):
- mask = tifffile.imread(f'{path}/{img}')

- if len(np.unique(mask)) != 2:
+ if img.endswith('.tiff') or img.endswith('.tif'):

- mask = otsu_binarize(mask)
- else:
- mask = mask != 0
+ mask = tifffile.imread(f'{path}/{img}')

- nodes = data * mask
- nodes = np.unique(nodes)
- nodes = nodes.tolist()
- if 0 in nodes:
- del nodes[0]
+ if len(np.unique(mask)) != 2:

- if img.endswith('.tiff'):
- base_name = img[:-5]
- elif img.endswith('.tif'):
- base_name = img[:-4]
- else:
- base_name = img
+ mask = otsu_binarize(mask)
+ else:
+ mask = mask != 0
+
+ nodes = data * mask
+ nodes = np.unique(nodes)
+ nodes = nodes.tolist()
+ if 0 in nodes:
+ del nodes[0]
+
+ if img.endswith('.tiff'):
+ base_name = img[:-5]
+ elif img.endswith('.tif'):
+ base_name = img[:-4]
+ else:
+ base_name = img

- assigned = {}
+ assigned = {}


- for node in self.node_identities.keys():
+ for node in self.node_identities.keys():

- try:
+ try:

- if int(node) in nodes:
+ if int(node) in nodes:

- self.node_identities[node].append(f'{base_name}+')
+ self.node_identities[node].append(f'{base_name}+')

- elif include:
+ elif include:

- self.node_identities[node].append(f'{base_name}-')
+ self.node_identities[node].append(f'{base_name}-')

- except:
- pass
+ except:
+ pass

- modify_dict = copy.deepcopy(self.node_identities)
+ modify_dict = copy.deepcopy(self.node_identities)

  for node, iden in self.node_identities.items():
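The merge_node_ids loop is re-indented under a new per-file guard so that only .tif/.tiff entries are read and binarized. A standalone sketch of that guard and binarization step, with skimage's Otsu threshold standing in for the package's otsu_binarize helper and placeholder paths:

# Standalone sketch of the per-file handling now used in merge_node_ids:
# only .tif/.tiff entries are read; non-binary masks are thresholded, binary
# masks are cast to boolean. skimage's Otsu stands in for otsu_binarize here,
# and both paths are placeholders.
import os
import numpy as np
import tifffile
from skimage.filters import threshold_otsu

path = "marker_masks"                          # hypothetical directory of marker masks
data = tifffile.imread("labelled_nodes.tif")   # hypothetical labelled node image

for img in os.listdir(path):
    if img.endswith('.tiff') or img.endswith('.tif'):
        mask = tifffile.imread(f'{path}/{img}')
        if len(np.unique(mask)) != 2:
            mask = mask > threshold_otsu(mask)   # stand-in for otsu_binarize
        else:
            mask = mask != 0
        node_ids = [n for n in np.unique(data * mask).tolist() if n != 0]
        base_name = img[:-5] if img.endswith('.tiff') else img[:-4]
        print(base_name, len(node_ids), "positive nodes")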