nettracer3d 0.2.7__tar.gz → 0.2.9__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. {nettracer3d-0.2.7/src/nettracer3d.egg-info → nettracer3d-0.2.9}/PKG-INFO +1 -1
  2. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/pyproject.toml +1 -1
  3. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/community_extractor.py +100 -16
  4. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/morphology.py +25 -20
  5. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/nettracer.py +69 -34
  6. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/nettracer_gui.py +310 -34
  7. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/network_analysis.py +108 -36
  8. {nettracer3d-0.2.7 → nettracer3d-0.2.9/src/nettracer3d.egg-info}/PKG-INFO +1 -1
  9. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/LICENSE +0 -0
  10. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/README.md +0 -0
  11. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/setup.cfg +0 -0
  12. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/__init__.py +0 -0
  13. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/hub_getter.py +0 -0
  14. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/modularity.py +0 -0
  15. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/network_draw.py +0 -0
  16. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/node_draw.py +0 -0
  17. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/proximity.py +0 -0
  18. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/simple_network.py +0 -0
  19. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d/smart_dilate.py +0 -0
  20. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d.egg-info/SOURCES.txt +0 -0
  21. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d.egg-info/dependency_links.txt +0 -0
  22. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d.egg-info/requires.txt +0 -0
  23. {nettracer3d-0.2.7 → nettracer3d-0.2.9}/src/nettracer3d.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: nettracer3d
- Version: 0.2.7
+ Version: 0.2.9
  Summary: Scripts for intializing and analyzing networks from segmentations of three dimensional images.
  Author-email: Liam McLaughlin <boom2449@gmail.com>
  Project-URL: User_Manual, https://drive.google.com/drive/folders/1fTkz3n4LN9_VxKRKC8lVQSlrz_wq0bVn?usp=drive_link
@@ -1,6 +1,6 @@
  [project]
  name = "nettracer3d"
- version = "0.2.7"
+ version = "0.2.9"
  authors = [
    { name="Liam McLaughlin", email="boom2449@gmail.com" },
  ]
@@ -2,7 +2,7 @@ import pandas as pd
  import networkx as nx
  import tifffile
  import numpy as np
- from typing import List, Dict, Tuple
+ from typing import List, Dict, Tuple, Union, Any
  from collections import defaultdict, Counter
  from networkx.algorithms import community
  from scipy import ndimage
@@ -648,7 +648,69 @@ def find_hub_nodes(G: nx.Graph, proportion: float = 0.1) -> List:

      return hub_nodes

+ def get_color_name_mapping():
+     """Return a dictionary of common colors and their RGB values."""
+     return {
+         'red': (255, 0, 0),
+         'green': (0, 255, 0),
+         'blue': (0, 0, 255),
+         'yellow': (255, 255, 0),
+         'cyan': (0, 255, 255),
+         'magenta': (255, 0, 255),
+         'purple': (128, 0, 128),
+         'orange': (255, 165, 0),
+         'brown': (165, 42, 42),
+         'pink': (255, 192, 203),
+         'navy': (0, 0, 128),
+         'teal': (0, 128, 128),
+         'olive': (128, 128, 0),
+         'maroon': (128, 0, 0),
+         'lime': (50, 205, 50),
+         'indigo': (75, 0, 130),
+         'violet': (238, 130, 238),
+         'coral': (255, 127, 80),
+         'turquoise': (64, 224, 208),
+         'gold': (255, 215, 0)
+     }
+
+ def rgb_to_color_name(rgb: Tuple[int, int, int]) -> str:
+     """
+     Convert an RGB tuple to its nearest color name.
+
+     Args:
+         rgb: Tuple of (r, g, b) values
+
+     Returns:
+         str: Name of the closest matching color
+     """
+     color_map = get_color_name_mapping()
+
+     # Convert input RGB to numpy array
+     rgb_array = np.array(rgb)
+
+     # Calculate Euclidean distance to all known colors
+     min_distance = float('inf')
+     closest_color = None
+
+     for color_name, color_rgb in color_map.items():
+         distance = np.sqrt(np.sum((rgb_array - np.array(color_rgb)) ** 2))
+         if distance < min_distance:
+             min_distance = distance
+             closest_color = color_name
+
+     return closest_color

+ def convert_node_colors_to_names(node_to_color: Dict[int, Tuple[int, int, int]]) -> Dict[int, str]:
+     """
+     Convert a dictionary of node-to-RGB mappings to node-to-color-name mappings.
+
+     Args:
+         node_to_color: Dictionary mapping node IDs to RGB tuples
+
+     Returns:
+         Dictionary mapping node IDs to color names
+     """
+     return {node: rgb_to_color_name(color) for node, color in node_to_color.items()}

  def generate_distinct_colors(n_colors: int) -> List[Tuple[int, int, int]]:
      """
@@ -721,33 +783,55 @@ def assign_community_colors(community_dict: Dict[int, int], labeled_array: np.nd
          mask = labeled_array == label
          for i in range(3):  # RGB channels
              rgb_array[mask, i] = node_to_color[label][i]
+
+     node_to_color_names = convert_node_colors_to_names(community_to_color)
+

-     return rgb_array
+     return rgb_array, node_to_color_names

- def assign_community_grays(community_dict: Dict[int, int], labeled_array: np.ndarray) -> np.ndarray:
+ def assign_community_grays(community_dict: Dict[int, Union[int, str, Any]], labeled_array: np.ndarray) -> np.ndarray:
      """
-     Assign distinct grayscale values to communities.
+     Assign grayscale values to communities. For numeric communities, uses the community
+     number directly. For string/other communities, assigns sequential values.

      Args:
-         community_dict: Dictionary mapping node IDs to community numbers
+         community_dict: Dictionary mapping node IDs to community identifiers (numbers or strings)
          labeled_array: 3D numpy array with labels corresponding to node IDs

      Returns:
-         grayscale numpy array
+         tuple: (grayscale numpy array, mapping of node IDs to assigned values)
      """
-     # Get unique communities
-     communities = set(community_dict.values())
-     n_communities = len(communities)
+     # Determine if we're dealing with numeric or string communities
+     sample_value = next(iter(community_dict.values()))
+     is_numeric = isinstance(sample_value, (int, float))

-     # Generate evenly spaced grayscale values (excluding pure black for background)
-     gray_values = np.linspace(1, 255, n_communities, dtype=np.uint8)
+     if is_numeric:
+         # For numeric communities, use values directly
+         node_to_gray = community_dict
+         max_val = max(community_dict.values())
+     else:
+         # For string/other communities, assign sequential values
+         unique_communities = sorted(set(community_dict.values()))
+         community_to_value = {comm: i+1 for i, comm in enumerate(unique_communities)}
+         node_to_gray = {node: community_to_value[comm] for node, comm in community_dict.items()}
+         max_val = len(unique_communities)

-     # Create direct mapping from node ID to grayscale value
-     node_to_gray = {node: gray_values[list(communities).index(comm)]
-                     for node, comm in community_dict.items()}
+     # Choose appropriate dtype based on maximum value
+     if max_val <= 255:
+         dtype = np.uint8
+     elif max_val <= 65535:
+         dtype = np.uint16
+     else:
+         dtype = np.uint32

      # Create output array
-     gray_array = np.zeros_like(labeled_array, dtype=np.uint8)
+     gray_array = np.zeros_like(labeled_array, dtype=dtype)
+
+     # Create mapping of unique communities to their grayscale values
+     if is_numeric:
+         community_to_gray = {comm: comm for comm in set(community_dict.values())}
+     else:
+         community_to_gray = {comm: i+1 for i, comm in enumerate(sorted(set(community_dict.values())))}

      # Use numpy's vectorized operations for faster assignment
      unique_labels = np.unique(labeled_array)
@@ -755,7 +839,7 @@ def assign_community_grays(community_dict: Dict[int, int], labeled_array: np.nda
          if label in node_to_gray:
              gray_array[labeled_array == label] = node_to_gray[label]

-     return gray_array
+     return gray_array, community_to_gray


  if __name__ == "__main__":
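A minimal usage sketch of the new color-name helpers added above (assuming they are imported from nettracer3d.community_extractor; the node IDs and RGB values are made up for illustration):

    from nettracer3d import community_extractor as ce

    node_to_color = {1: (250, 5, 5), 2: (60, 200, 210)}    # hypothetical node -> RGB mapping
    print(ce.rgb_to_color_name((250, 5, 5)))                # nearest named color: 'red'
    print(ce.convert_node_colors_to_names(node_to_color))   # {1: 'red', 2: 'turquoise'}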
@@ -270,51 +270,56 @@ def calculate_voxel_volumes(array, xy_scale=1, z_scale=1):
      return volumes


- def search_neighbor_ids(nodes, targets, id_dict, neighborhood_dict, totals, search, xy_scale, z_scale):
+ def search_neighbor_ids(nodes, targets, id_dict, neighborhood_dict, totals, search, xy_scale, z_scale, root):

-
+     if 0 in targets:
+         targets.remove(0)
      targets = np.isin(nodes, targets)
      targets = nettracer.binarize(targets)

      dilate_xy, dilate_z = nettracer.dilation_length_to_pixels(xy_scale, z_scale, search, search)
-     print(f"Dilation parameters - xy: {dilate_xy}, z: {dilate_z}")

-     targets = nettracer.dilate_3D_recursive(targets, dilate_xy, dilate_xy, dilate_z)
-     targets = targets != 0
-     print(f"After dilation - targets shape: {targets.shape}, sum: {np.sum(targets)}")
+     dilated = nettracer.dilate_3D_recursive(targets, dilate_xy, dilate_xy, dilate_z)
+     dilated = dilated - targets #technically we dont need the cores
+     search_vol = np.count_nonzero(dilated) * xy_scale * xy_scale * z_scale #need this for density
+     targets = dilated != 0
+     del dilated
+

      targets = targets * nodes
-     print(f"After multiplication with nodes - unique values in targets: {np.unique(targets)}")

      unique, counts = np.unique(targets, return_counts=True)
      count_dict = dict(zip(unique, counts))
-     print(f"Initial count_dict: {count_dict}")
+     print(count_dict)

      del count_dict[0]
-     print(f"count_dict after removing zeros: {count_dict}")

      unique, counts = np.unique(nodes, return_counts=True)
      total_dict = dict(zip(unique, counts))
-     print(f"Initial total_dict: {total_dict}")
-
+     print(total_dict)
+
      del total_dict[0]
-     print(f"total_dict after removing zeros: {total_dict}")

-     print(f"id_dict keys: {list(id_dict.keys())}")
-     print(f"Initial neighborhood_dict: {neighborhood_dict}")
-     print(f"Initial totals: {totals}")

      for label in total_dict:
          if label in id_dict:
              if label in count_dict:
                  neighborhood_dict[id_dict[label]] += count_dict[label]
-                 print(f"Updated neighborhood_dict[{id_dict[label]}] with count {count_dict[label]}")
              totals[id_dict[label]] += total_dict[label]
-             print(f"Updated totals[{id_dict[label]}] with total {total_dict[label]}")
+
+
+     try:
+         del neighborhood_dict[root] #no good way to get this
+         del totals[root] #no good way to get this
+     except:
+         pass

-     print(f"Final neighborhood_dict: {neighborhood_dict}")
-     print(f"Final totals: {totals}")
-     return neighborhood_dict, totals
+     volume = nodes.shape[0] * nodes.shape[1] * nodes.shape[2] * xy_scale * xy_scale * z_scale
+     densities = {}
+     for nodeid, amount in totals.items():
+         densities[nodeid] = (neighborhood_dict[nodeid]/search_vol)/(amount/volume)
+
+     return neighborhood_dict, totals, densities
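For reference, the densities dictionary introduced above appears to be a relative concentration per node identity:

    clustering_factor = (voxels of that ID inside the dilated search shell / shell volume)
                        / (all voxels of that ID in the image / whole-image volume)

so values above 1 suggest that identity is enriched within the search distance of the root nodes, and values below 1 suggest it is depleted there.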



@@ -322,15 +322,25 @@ def create_and_save_dataframe(pairwise_connections, excel_filename = None):
          df = pd.concat([df, temp_df], axis=1)

      if excel_filename is not None:
-
-         try:
+         # Remove file extension if present to use as base path
+         base_path = excel_filename.rsplit('.', 1)[0]

-             # Save the DataFrame to an Excel file
-             df.to_excel(excel_filename, index=False)
-             print(f"Network file saved to {excel_filename}")
-
+         # First try to save as CSV
+         try:
+             csv_path = f"{base_path}.csv"
+             df.to_csv(csv_path, index=False)
+             print(f"Network file saved to {csv_path}")
+             return
          except Exception as e:
-             print(f"Unable to write network file to disk... please make sure that {excel_filename} is being saved to a valid directory and try again")
+             print(f"Could not save as CSV: {str(e)}")
+
+         # If CSV fails, try to save as Excel
+         try:
+             xlsx_path = f"{base_path}.xlsx"
+             df.to_excel(xlsx_path, index=False)
+             print(f"Network file saved to {xlsx_path}")
+         except Exception as e:
+             print(f"Unable to write network file to disk... please make sure that {base_path}.xlsx is being saved to a valid directory and try again")

      else:
          return df
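Note that with this change the extension of the saved file may differ from the one passed in: the extension is stripped and a .csv is attempted first, with .xlsx only as the fallback. A small sketch of the base-path handling (the path is made up):

    base_path = "results/network_output.xlsx".rsplit('.', 1)[0]   # 'results/network_output'
    # normally written as f"{base_path}.csv"; f"{base_path}.xlsx" only if the CSV write fails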
@@ -1383,7 +1393,7 @@ def binarize(arrayimage, directory = None):

      return arrayimage

- def dilate(arrayimage, amount, xy_scale = 1, z_scale = 1, directory = None, fast_dil = False):
+ def dilate(arrayimage, amount, xy_scale = 1, z_scale = 1, directory = None, fast_dil = False, recursive = False):
      """
      Can be used to dilate a binary image in 3D. Dilated output will be saved to the active directory if none is specified. Note that dilation is done with single-instance kernels and not iterations, and therefore
      objects will lose their shape somewhat and become cube-ish if the 'amount' param is ever significantly larger than the objects in quesiton.
@@ -1408,13 +1418,15 @@ def dilate(arrayimage, amount, xy_scale = 1, z_scale = 1, directory = None, fast
      if len(np.unique(arrayimage)) > 2: #binarize
          arrayimage = binarize(arrayimage)

-     if not fast_dil:
+     if not fast_dil and not recursive:
          arrayimage = (dilate_3D(arrayimage, dilate_xy, dilate_xy, dilate_z)) * 255
          if np.max(arrayimage) == 1:
              arrayimage = arrayimage * 255
-
-     else:
+     elif not recursive:
          arrayimage = (dilate_3D_old(arrayimage, dilate_xy, dilate_xy, dilate_z)) * 255
+     else:
+         arrayimage = (dilate_3D_recursive(arrayimage, dilate_xy, dilate_xy, dilate_z)) * 255
+


      if type(image) == str:
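A hedged usage sketch of the new recursive option (the n3d alias mirrors the one used in the GUI code below; the array, amount, and scales are made up):

    import numpy as np
    from nettracer3d import nettracer as n3d

    mask = np.zeros((32, 256, 256), dtype=np.uint8)
    mask[16, 128, 128] = 1
    # 'Recursive Binary Dilation' path, intended for dilation radii much larger than the objects:
    dilated = n3d.dilate(mask, 20, xy_scale=1, z_scale=1, recursive=True)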
@@ -1935,6 +1947,9 @@ def encapsulate(parent_dir = None, name = None):

      return new_folder_path

+
+
+
  #THE 3D NETWORK CLASS

  class Network_3D:
@@ -2420,7 +2435,7 @@ class Network_3D:


          if filename is None:
-             filename = "drawn_network.tif"
+             filename = "overlay_1.tif"
          elif not filename.endswith(('.tif', '.tiff')):
              filename += '.tif'

@@ -2436,7 +2451,7 @@ class Network_3D:
      def save_id_overlay(self, directory = None, filename = None):

          if filename is None:
-             filename = "labelled_node_indices.tif"
+             filename = "overlay_2.tif"
          if not filename.endswith(('.tif', '.tiff')):
              filename += '.tif'

@@ -2457,9 +2472,7 @@ class Network_3D:
          :param directory: (Optional - Val = None; String). The path to an intended directory to save the properties to.
          """

-
-         if directory is None:
-             directory = encapsulate(parent_dir = parent_dir, name = name)
+         directory = encapsulate(parent_dir = parent_dir, name = name)

          try:
              self.save_nodes(directory)
@@ -2720,7 +2733,7 @@ class Network_3D:
          items = directory_info(directory)

          for item in items:
-             if item == 'node_communities.xlsx':
+             if item == 'node_communities.xlsx' or item == 'node_communities.csv':
                  if directory is not None:
                      self._communities = network_analysis.read_excel_to_singval_dict(f'{directory}/{item}')
                      print("Succesfully loaded communities")
@@ -2772,7 +2785,7 @@ class Network_3D:
          items = directory_info(directory)

          for item in items:
-             if item == 'drawn_network.tif':
+             if item == 'overlay_1.tif':
                  if directory is not None:
                      self._network_overlay = tifffile.imread(f'{directory}/{item}')
                      print("Succesfully loaded network overlay")
@@ -2797,7 +2810,7 @@ class Network_3D:
          items = directory_info(directory)

          for item in items:
-             if item == 'labelled_node_indices.tif':
+             if item == 'overlay_2.tif':
                  if directory is not None:
                      self._id_overlay = tifffile.imread(f'{directory}/{item}')
                      print("Succesfully loaded id overlay")
@@ -2811,7 +2824,7 @@ class Network_3D:
              #print("Could not find id overlay. They must be in the specified directory and named 'labelled_node_indices.tif'")


-     def assemble(self, directory = None, node_path = None, edge_path = None, search_region_path = None, network_path = None, node_centroids_path = None, node_identities_path = None, edge_centroids_path = None, scaling_path = None, net_overlay_path = None, id_overlay_path = None):
+     def assemble(self, directory = None, node_path = None, edge_path = None, search_region_path = None, network_path = None, node_centroids_path = None, node_identities_path = None, edge_centroids_path = None, scaling_path = None, net_overlay_path = None, id_overlay_path = None, community_path = None ):
          """
          Can be called on a Network_3D object to load all properties simultaneously from a specified directory. It will look for files with the names specified in the property loading methods, in the active directory if none is specified.
          Alternatively, for each property a filepath to any file may be passed to look there to load. This method is intended to be used together with the dump method to easily save and load the Network_3D objects once they had been calculated.
@@ -2835,6 +2848,7 @@ class Network_3D:
          self.load_node_identities(directory, node_identities_path)
          self.load_edge_centroids(directory, edge_centroids_path)
          self.load_scaling(directory, scaling_path)
+         self.load_communities(directory, community_path)
          self.load_network_overlay(directory, net_overlay_path)
          self.load_id_overlay(directory, id_overlay_path)

@@ -3127,8 +3141,7 @@ class Network_3D:
          """


-         if directory is None:
-             directory = encapsulate()
+         directory = encapsulate()

          self._xy_scale = xy_scale
          self._z_scale = z_scale
@@ -3906,25 +3919,37 @@ class Network_3D:
          return hubs, hub_img


-     def extract_communities(self, color_code = True, down_factor = None):
+     def extract_communities(self, color_code = True, down_factor = None, identities = False):

          if down_factor is not None:
              original_shape = self._nodes.shape
              temp = downsample(self._nodes, down_factor)
              if color_code:
-                 image = community_extractor.assign_community_colors(self.communities, temp)
+                 if not identities:
+                     image, output = community_extractor.assign_community_colors(self.communities, temp)
+                 else:
+                     image, output = community_extractor.assign_community_colors(self.node_identities, temp)
              else:
-                 image = community_extractor.assign_community_grays(self.communities, temp)
+                 if not identities:
+                     image, output = community_extractor.assign_community_grays(self.communities, temp)
+                 else:
+                     image, output = community_extractor.assign_community_grays(self.node_identities, temp)
              image = upsample_with_padding(image, down_factor, original_shape)
          else:

              if color_code:
-                 image = community_extractor.assign_community_colors(self.communities, self._nodes)
+                 if not identities:
+                     image, output = community_extractor.assign_community_colors(self.communities, self._nodes)
+                 else:
+                     image, output = community_extractor.assign_community_colors(self.node_identities, self._nodes)
              else:
-                 image = community_extractor.assign_community_grays(self.communities, self._nodes)
+                 if not identities:
+                     image, output = community_extractor.assign_community_grays(self.communities, self._nodes)
+                 else:
+                     image, output = community_extractor.assign_community_grays(self.node_identities, self._nodes)


-         return image
+         return image, output
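A sketch of the updated call pattern (assuming a Network_3D object whose communities and node_identities are already populated; the directory name is hypothetical):

    from nettracer3d import nettracer as n3d

    net = n3d.Network_3D()
    net.assemble('my_saved_network')                                  # directory layout produced by dump()
    rgb, legend = net.extract_communities(down_factor=2)              # color-coded communities + color-name legend
    gray, legend2 = net.extract_communities(color_code=False, identities=True)  # grayscale-coded node identities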



@@ -4072,6 +4097,14 @@ class Network_3D:
          except:
              stats['degree_assortativity'] = "Failed to compute"

+         try:
+             nodes = np.unique(self._nodes)
+             if nodes[0] == 0:
+                 nodes = np.delete(nodes, 0)
+             stats['Unconnected nodes (left out from node image)'] = (len(nodes) - len(G.nodes()))
+         except:
+             stats['Unconnected nodes (left out from node image)'] = "Failed to compute"
+

          return stats

@@ -4114,9 +4147,9 @@ class Network_3D:


          elif mode == 1: #Search neighborhoods morphologically, obtain densities
-             neighborhood_dict, total_dict = morphology.search_neighbor_ids(self._nodes, targets, node_identities, neighborhood_dict, total_dict, search, self._xy_scale, self._z_scale)
-             title1 = f'Volumetric Neighborhood Distribution of Nodes in image from Nodes: {root}'
-             title2 = f'Density Distribution of Nodes in image from Nodes {root} as a proportion of total node volume of that ID'
+             neighborhood_dict, total_dict, densities = morphology.search_neighbor_ids(self._nodes, targets, node_identities, neighborhood_dict, total_dict, search, self._xy_scale, self._z_scale, root)
+             title1 = f'Volumetric Neighborhood Distribution of Nodes in image that are {search} from nodes: {root}'
+             title2 = f'Density Distribution of Nodes in image that are {search} from Nodes {root} as a proportion of total node volume of that ID'


          for identity in neighborhood_dict:
@@ -4126,11 +4159,13 @@ class Network_3D:


          network_analysis.create_bar_graph(proportion_dict, title2, "Node Identity", "Proportion", directory=directory)
+         try:
+             network_analysis.create_bar_graph(densities, f'Clustering Factor of Node Identities with {search} from nodes {root}', "Node Identity", "Density Search/Density Total", directory=directory)
+         except:
+             densities = None


-
-
-         return neighborhood_dict, proportion_dict, title1, title2
+         return neighborhood_dict, proportion_dict, title1, title2, densities


@@ -555,6 +555,8 @@ class ImageViewerWindow(QMainWindow):
          if len(self.clicked_values['nodes']) > 1 or len(self.clicked_values['edges']) > 1:
              combine_obj = highlight_menu.addAction("Combine Object Labels")
              combine_obj.triggered.connect(self.handle_combine)
+             split_obj = highlight_menu.addAction("Split Non-Touching Labels")
+             split_obj.triggered.connect(self.handle_seperate)
              delete_obj = highlight_menu.addAction("Delete Selection")
              delete_obj.triggered.connect(self.handle_delete)
          if len(self.clicked_values['nodes']) > 1:
@@ -991,6 +993,58 @@ class ImageViewerWindow(QMainWindow):
          except Exception as e:
              print(f"Error: {e}")

+     def handle_info(self, sort = 'node'):
+
+         try:
+
+             info_dict = {}
+
+             if sort == 'node':
+
+                 label = self.clicked_values['nodes'][-1]
+
+                 info_dict['Label'] = label
+
+                 info_dict['Object Class'] = 'Node'
+
+                 if my_network.node_identities is not None:
+                     info_dict['ID'] = my_network.node_identities[label]
+
+                 if my_network.network is not None:
+                     info_dict['Degree'] = my_network.network.degree(label)
+
+                 if my_network.communities is not None:
+                     info_dict['Community'] = my_network.communities[label]
+
+                 if my_network.node_centroids is not None:
+                     info_dict['Centroid'] = my_network.node_centroids[label]
+
+                 if self.volume_dict[0] is not None:
+                     info_dict['Volume'] = self.volume_dict[0][label]
+
+
+             elif sort == 'edge':
+
+                 label = self.clicked_values['edges'][-1]
+
+                 info_dict['Label'] = label
+
+                 info_dict['Object Class'] = 'Edge'
+
+                 if my_network.edge_centroids is not None:
+                     info_dict['Centroid'] = my_network.edge_centroids[label]
+
+                 if self.volume_dict[1] is not None:
+                     info_dict['Volume'] = self.volume_dict[1][label]
+
+             self.format_for_upperright_table(info_dict, title = f'Info on Object')
+
+         except:
+             pass
+
+
+
+
      def handle_combine(self):

          try:
@@ -1040,12 +1094,73 @@ class ImageViewerWindow(QMainWindow):
                  for column in range(model.columnCount(None)):
                      self.network_table.resizeColumnToContents(column)

+                 self.highlight_overlay = None
+                 self.update_display()
+
+                 self.show_centroid_dialog()
+
              except Exception as e:
                  print(f"Error, could not update network: {e}")

+
          except Exception as e:
              print(f"An error has occured: {e}")

+     def handle_seperate(self):
+
+         try:
+
+             if len(self.clicked_values['nodes']) > 0:
+                 self.create_highlight_overlay(node_indices = self.clicked_values['nodes'])
+                 max_val = np.max(my_network.nodes)
+                 self.highlight_overlay, num = n3d.label_objects(self.highlight_overlay)
+
+                 node_bools = self.highlight_overlay != 0
+                 new_max = num + max_val
+                 self.highlight_overlay = self.highlight_overlay + max_val
+                 self.highlight_overlay = self.highlight_overlay * node_bools
+                 if new_max < 256:
+                     dtype = np.uint8
+                 elif new_max < 65536:
+                     dtype = np.uint16
+                 else:
+                     dtype = np.uint32
+
+                 self.highlight_overlay = self.highlight_overlay.astype(dtype)
+                 my_network.nodes = my_network.nodes + self.highlight_overlay
+                 self.load_channel(0, my_network.nodes, True)
+
+             if len(self.clicked_values['edges']) > 0:
+                 self.create_highlight_overlay(edge_indices = self.clicked_values['edges'])
+                 max_val = np.max(my_network.edges)
+                 self.highlight_overlay, num = n3d.label_objects(self.highlight_overlay)
+                 node_bools = self.highlight_overlay != 0
+                 new_max = num + max_val
+
+                 self.highlight_overlay = self.highlight_overlay + max_val
+                 self.highlight_overlay = self.highlight_overlay * node_bools
+                 if new_max < 256:
+                     dtype = np.uint8
+                 elif new_max < 65536:
+                     dtype = np.uint16
+                 else:
+                     dtype = np.uint32
+
+                 self.highlight_overlay = self.highlight_overlay.astype(dtype)
+                 my_network.edges = my_network.edges + self.highlight_overlay
+                 self.load_channel(1, my_network.edges, True)
+             self.highlight_overlay = None
+             self.update_display()
+             print("Network is not updated automatically, please recompute if necesarry. Identities are not automatically updated.")
+             self.show_centroid_dialog()
+
+         except Exception as e:
+             print(f"Error seperating: {e}")
+
+
+
+
+
      def handle_delete(self):

          try:
@@ -1399,6 +1514,7 @@ class ImageViewerWindow(QMainWindow):
              # Try to highlight the last selected value in tables
              if self.clicked_values['edges']:
                  self.highlight_value_in_tables(self.clicked_values['edges'][-1])
+

          elif not self.selecting and self.selection_start:  # If we had a click but never started selection
              # Handle as a normal click
@@ -1564,6 +1680,7 @@ class ImageViewerWindow(QMainWindow):
                  self.clicked_values = {'nodes': [clicked_value], 'edges': []}
                  # Get latest value (or the last remaining one if we just removed an item)
                  latest_value = self.clicked_values['nodes'][-1] if self.clicked_values['nodes'] else None
+                 self.handle_info('node')
              elif self.active_channel == 1:
                  if ctrl_pressed:
                      if clicked_value in self.clicked_values['edges']:
@@ -1577,6 +1694,8 @@ class ImageViewerWindow(QMainWindow):
                  self.clicked_values = {'nodes': [], 'edges': [clicked_value]}
                  # Get latest value (or the last remaining one if we just removed an item)
                  latest_value = self.clicked_values['edges'][-1] if self.clicked_values['edges'] else None
+                 self.handle_info('edge')
+

              # Try to find and highlight the latest value in the current table
              try:
@@ -1689,7 +1808,9 @@ class ImageViewerWindow(QMainWindow):
          mother_action = overlay_menu.addAction("Get Mother Nodes")
          mother_action.triggered.connect(self.show_mother_dialog)
          community_code_action = overlay_menu.addAction("Code Communities")
-         community_code_action.triggered.connect(self.show_code_dialog)
+         community_code_action.triggered.connect(lambda: self.show_code_dialog(sort = 'Community'))
+         id_code_action = overlay_menu.addAction("Code Identities")
+         id_code_action.triggered.connect(lambda: self.show_code_dialog(sort = 'Identity'))


          # Process menu
@@ -2050,6 +2171,37 @@ class ImageViewerWindow(QMainWindow):
      def load_misc(self, sort):
          """Loads various things"""

+         def uncork(my_dict, trumper = None):
+
+             if trumper is None:
+                 for thing in my_dict:
+                     val = my_dict[thing]
+                     new_val = val[0]
+                     for i in range(1, len(val)):
+                         try:
+                             new_val += f" AND {val[i]}"
+                         except:
+                             break
+                     my_dict[thing] = new_val
+             elif trumper == '-':
+                 for key, value in my_dict.items():
+                     my_dict[key] = value[0]
+             else:
+                 for thing in my_dict:
+                     val = my_dict[thing]
+                     if trumper in val:
+                         my_dict[thing] = trumper
+                     else:
+                         new_val = val[0]
+                         for i in range(1, len(val)):
+                             try:
+                                 new_val += f" AND {val[i]}"
+                             except:
+                                 break
+                         my_dict[thing] = new_val
+
+             return my_dict
+
          if sort != 'Merge Nodes':

              try:
@@ -2058,13 +2210,32 @@ class ImageViewerWindow(QMainWindow):
                      self,
                      f"Load {sort}",
                      "",
-                     "Spreadsheets (*.xlsx *.csv)"
+                     "Spreadsheets (*.xlsx *.csv *.json)"
                  )

                  try:
                      if sort == 'Node Identities':
                          my_network.load_node_identities(file_path = filename)

+                         first_value = list(my_network.node_identities.values())[0] # Check that there are not multiple IDs
+                         if isinstance(first_value, (list, tuple)):
+                             trump_value, ok = QInputDialog.getText(
+                                 self,
+                                 'Multiple IDs Detected',
+                                 'The node identities appear to contain multiple ids per node in a list.\n'
+                                 'If you desire one node ID to trump all others, enter it here.\n'
+                                 '(Enter "-" to have the first IDs trump all others or press x to skip)'
+                             )
+                             if not ok or trump_value.strip() == '':
+                                 trump_value = None
+                             elif trump_value.upper() == '-':
+                                 trump_value = '-'
+                             my_network.node_identities = uncork(my_network.node_identities, trump_value)
+                         else:
+                             trump_value = None
+                             my_network.node_identities = uncork(my_network.node_identities, trump_value)
+
+
                          if hasattr(my_network, 'node_identities') and my_network.node_identities is not None:
                              try:
                                  self.format_for_upperright_table(my_network.node_identities, 'NodeID', 'Identity', 'Node Identities')
@@ -2091,9 +2262,13 @@ class ImageViewerWindow(QMainWindow):


                  except Exception as e:
+                     import traceback
+                     print(traceback.format_exc())
                      print(f"An error has occured: {e}")

              except Exception as e:
+                 import traceback
+                 print(traceback.format_exc())
                  QMessageBox.critical(
                      self,
                      "Error Loading",
@@ -2227,6 +2402,13 @@ class ImageViewerWindow(QMainWindow):
                      except Exception as e:
                          print(f"Error loading node identity table: {e}")

+
+                 if hasattr(my_network, 'communities') and my_network.communities is not None:
+                     try:
+                         self.format_for_upperright_table(my_network.communities, 'NodeID', 'Community', 'Node Communities')
+                     except Exception as e:
+                         print(f"Error loading node community table: {e}")
+
              except Exception as e:
                  QMessageBox.critical(
                      self,
@@ -2245,7 +2427,7 @@ class ImageViewerWindow(QMainWindow):
                  self,
                  f"Load Network",
                  "",
-                 "Spreadsheets (*.xlsx *.csv)"
+                 "Spreadsheets (*.xlsx *.csv *.json)"
              )

              my_network.load_network(file_path = filename)
@@ -2281,6 +2463,57 @@ class ImageViewerWindow(QMainWindow):
              else:
                  btn.setStyleSheet("")

+     def reduce_rgb_dimension(self, array, method='first'):
+         """
+         Reduces a 4D array (Z, Y, X, C) to 3D (Z, Y, X) by dropping the color dimension
+         using the specified method.
+
+         Parameters:
+         -----------
+         array : numpy.ndarray
+             4D array with shape (Z, Y, X, C) where C is the color channel dimension
+         method : str, optional
+             Method to use for reduction:
+             - 'first': takes the first color channel (default)
+             - 'mean': averages across color channels
+             - 'max': takes maximum value across color channels
+             - 'min': takes minimum value across color channels
+
+         Returns:
+         --------
+         numpy.ndarray
+             3D array with shape (Z, Y, X)
+
+         Raises:
+         -------
+         ValueError
+             If input array is not 4D or method is not recognized
+         """
+         if array.ndim != 4:
+             raise ValueError(f"Expected 4D array, got {array.ndim}D array")
+
+         if method not in ['first', 'mean', 'max', 'min']:
+             raise ValueError(f"Unknown method: {method}")
+
+         if method == 'first':
+             return array[..., 0]
+         elif method == 'mean':
+             return np.mean(array, axis=-1)
+         elif method == 'max':
+             return np.max(array, axis=-1)
+         else:  # min
+             return np.min(array, axis=-1)
+
+     def confirm_rgb_dialog(self):
+         """Shows a dialog asking user to confirm if image is 2D RGB"""
+         msg = QMessageBox()
+         msg.setIcon(QMessageBox.Icon.Question)
+         msg.setText("Image Format Detection")
+         msg.setInformativeText("Is this a 2D color (RGB/CMYK) image?")
+         msg.setWindowTitle("Confirm Image Format")
+         msg.setStandardButtons(QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No)
+         return msg.exec() == QMessageBox.StandardButton.Yes
+
      def load_channel(self, channel_index, channel_data=None, data=False, assign_shape = True):
          """Load a channel and enable active channel selection if needed."""

@@ -2294,15 +2527,22 @@ class ImageViewerWindow(QMainWindow):
                  "TIFF Files (*.tif *.tiff)"
              )
              self.channel_data[channel_index] = tifffile.imread(filename)
-             print(self.channel_data[channel_index].shape)
-             if len(self.channel_data[channel_index].shape) == 2:
-                 #self.channel_data[channel_index] = np.stack((self.channel_data[channel_index], self.channel_data[channel_index]), axis = 0) #currently handle 2d arrays by just making them 3d
+             if len(self.channel_data[channel_index].shape) == 2: # handle 2d data
                  self.channel_data[channel_index] = np.expand_dims(self.channel_data[channel_index], axis=0)

-
          else:
              self.channel_data[channel_index] = channel_data

+         if len(self.channel_data[channel_index].shape) == 3: # potentially 2D RGB
+             if self.channel_data[channel_index].shape[-1] in (3, 4): # last dim is 3 or 4
+                 if self.confirm_rgb_dialog():
+                     # User confirmed it's 2D RGB, expand to 4D
+                     self.channel_data[channel_index] = np.expand_dims(self.channel_data[channel_index], axis=0)
+
+         if len(self.channel_data[channel_index].shape) == 4 and (channel_index == 0 or channel_index == 1):
+             self.channel_data[channel_index] = self.reduce_rgb_dimension(self.channel_data[channel_index])
+
+

          if channel_index == 0:
              my_network.nodes = self.channel_data[channel_index]
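For reference, the channel reduction used above is plain numpy; an equivalent standalone sketch (the array shape is made up):

    import numpy as np

    rgb_stack = np.random.randint(0, 255, size=(10, 64, 64, 3), dtype=np.uint8)  # (Z, Y, X, C)
    first = rgb_stack[..., 0]        # what method='first' returns
    mean = rgb_stack.mean(axis=-1)   # what method='mean' returns
    print(first.shape, mean.shape)   # (10, 64, 64) (10, 64, 64)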
@@ -2746,8 +2986,8 @@ class ImageViewerWindow(QMainWindow):
          dialog = MotherDialog(self)
          dialog.exec()

-     def show_code_dialog(self):
-         dialog = CodeDialog(self)
+     def show_code_dialog(self, sort = 'Community'):
+         dialog = CodeDialog(self, sort = sort)
          dialog.exec()


@@ -4134,16 +4374,19 @@ class NeighborIdentityDialog(QDialog):

          layout = QFormLayout(self)

-         self.root = QComboBox()
-         self.root.addItems(list(set(my_network.node_identities.values())))
-         self.root.setCurrentIndex(0)
-         layout.addRow("Root Identity to Search for Neighbor's IDs (search uses nodes of this ID, finds what IDs they connect to", self.root)
+         if my_network.node_identities is not None:
+             self.root = QComboBox()
+             self.root.addItems(list(set(my_network.node_identities.values())))
+             self.root.setCurrentIndex(0)
+             layout.addRow("Root Identity to Search for Neighbor's IDs (search uses nodes of this ID, finds what IDs they connect to", self.root)
+         else:
+             self.root = None

          self.directory = QLineEdit("")
          layout.addRow("Output Directory:", self.directory)

          self.mode = QComboBox()
-         self.mode.addItems(["From Network - Based on Absolute Connectivity", "Use Labeled Nodes - Based on Neighborhood Densities"])
+         self.mode.addItems(["From Network - Based on Absolute Connectivity", "Use Labeled Nodes - Based on Morphological Neighborhood Densities"])
          self.mode.setCurrentIndex(0)
          layout.addRow("Mode", self.mode)

@@ -4157,21 +4400,33 @@ class NeighborIdentityDialog(QDialog):

      def neighborids(self):

-         root = self.root.currentText()
+         try:

-         directory = self.directory.text() if self.directory.text().strip() else None
+             try:
+                 root = self.root.currentText()
+             except:
+                 pass

-         mode = self.mode.currentIndex()
+             directory = self.directory.text() if self.directory.text().strip() else None

-         search = float(self.search.text()) if self.search.text().strip() else 0
+             mode = self.mode.currentIndex()

+             search = float(self.search.text()) if self.search.text().strip() else 0

-         result, result2, title1, title2 = my_network.neighborhood_identities(root = root, directory = directory, mode = mode, search = search)

-         self.parent().format_for_upperright_table(result, 'Node Identity', 'Amount', title = title1)
-         self.parent().format_for_upperright_table(result2, 'Node Identity', 'Proportion', title = title2)
+             result, result2, title1, title2, densities = my_network.neighborhood_identities(root = root, directory = directory, mode = mode, search = search)

-         self.accept()
+             self.parent().format_for_upperright_table(result, 'Node Identity', 'Amount', title = title1)
+             self.parent().format_for_upperright_table(result2, 'Node Identity', 'Proportion', title = title2)
+
+             if mode == 1:
+
+                 self.parent().format_for_upperright_table(densities, 'Node Identity', 'Density in search/density total', title = f'Clustering Factor of Node Identities with {search} from nodes {root}')
+
+
+             self.accept()
+         except Exception as e:
+             print(f"Error: {e}")


@@ -4521,14 +4776,16 @@ class MotherDialog(QDialog):

  class CodeDialog(QDialog):

-     def __init__(self, parent=None):
+     def __init__(self, parent=None, sort = 'Community'):

          super().__init__(parent)
-         self.setWindowTitle("Community Code Parameters (Will go to Overlay2)")
+         self.setWindowTitle(f"{sort} Code Parameters (Will go to Overlay2)")
          self.setModal(True)

          layout = QFormLayout(self)

+         self.sort = sort
+
          self.down_factor = QLineEdit("")
          layout.addRow("down_factor (for speeding up overlay generation - optional):", self.down_factor)

@@ -4540,7 +4797,7 @@ class CodeDialog(QDialog):


          # Add Run button
-         run_button = QPushButton("Community Code")
+         run_button = QPushButton(f"{sort} Code")
          run_button.clicked.connect(self.code)
          layout.addWidget(run_button)

@@ -4553,16 +4810,27 @@ class CodeDialog(QDialog):
              down_factor = float(self.down_factor.text()) if self.down_factor.text().strip() else None


-
-             if my_network.communities is None:
-                 self.parent().show_partition_dialog()
+             if self.sort == 'Community':
                  if my_network.communities is None:
-                     return
+                     self.parent().show_partition_dialog()
+                     if my_network.communities is None:
+                         return
+             elif my_network.node_identities is None:
+                 print("Node identities are not set")
+                 return
+
+             if self.sort == 'Community':
+                 if mode == 0:
+                     image, output = my_network.extract_communities(down_factor = down_factor)
+                 elif mode == 1:
+                     image, output = my_network.extract_communities(color_code = False, down_factor = down_factor)
+             else:
+                 if mode == 0:
+                     image, output = my_network.extract_communities(down_factor = down_factor, identities = True)
+                 elif mode == 1:
+                     image, output = my_network.extract_communities(color_code = False, down_factor = down_factor, identities = True)

-             if mode == 0:
-                 image = my_network.extract_communities(down_factor = down_factor)
-             elif mode == 1:
-                 image = my_network.extract_communities(color_code = False, down_factor = down_factor)
+             self.parent().format_for_upperright_table(output, f'{self.sort} Id', f'Encoding Val: {self.sort}', 'Legend')


              self.parent().load_channel(3, image, True)
@@ -4570,6 +4838,8 @@ class CodeDialog(QDialog):

          except Exception as e:
              print(f"An error has occurred: {e}")
+             import traceback
+             print(traceback.format_exc())


@@ -5052,7 +5322,7 @@ class DilateDialog(QDialog):

          # Add mode selection dropdown
          self.mode_selector = QComboBox()
-         self.mode_selector.addItems(["Binary Dilation", "Preserve Labels (slower)"])
+         self.mode_selector.addItems(["Binary Dilation", "Preserve Labels (slower)", "Recursive Binary Dilation (Use if the dilation radius is much larger than your objects)"])
          self.mode_selector.setCurrentIndex(0)  # Default to Mode 1
          layout.addRow("Execution Mode:", self.mode_selector)

@@ -5093,12 +5363,18 @@ class DilateDialog(QDialog):
                  self.accept()
                  return

+             if accepted_mode == 2:
+                 recursive = True
+             else:
+                 recursive = False
+
              # Call dilate method with parameters
              result = n3d.dilate(
                  active_data,
                  amount,
                  xy_scale = xy_scale,
                  z_scale = z_scale,
+                 recursive = recursive
              )

              # Update both the display data and the network object
@@ -1,5 +1,6 @@
  import pandas as pd
  import networkx as nx
+ import json
  import tifffile
  import numpy as np
  from networkx.algorithms import community
@@ -105,6 +106,25 @@ def open_network(excel_file_path):

  def read_excel_to_lists(file_path, sheet_name=0):
      """Convert a pd dataframe to lists. Handles both .xlsx and .csv files"""
+     def load_json_to_list(filename):
+         with open(filename, 'r') as f:
+             data = json.load(f)
+
+         # Convert only numeric strings to integers, leave other strings as is
+         converted_data = [[],[],[]]
+         for i in data[0]:
+             try:
+                 converted_data[0].append(int(data[0][i]))
+                 converted_data[1].append(int(data[1][i]))
+                 try:
+                     converted_data[2].append(int(data[2][i]))
+                 except IndexError:
+                     converted_data[2].append(0)
+             except ValueError:
+                 converted_data[k] = v
+
+         return converted_data
+
      if type(file_path) == str:
          # Check file extension
          if file_path.lower().endswith('.xlsx'):
@@ -115,8 +135,11 @@ def read_excel_to_lists(file_path, sheet_name=0):
              # Read the CSV file into a DataFrame without headers
              df = pd.read_csv(file_path, header=None)
              df = df.drop(0)
+         elif file_path.lower().endswith('.json'):
+             df = load_json_to_list(file_path)
+             return df
          else:
-             raise ValueError("File must be either .xlsx or .csv format")
+             raise ValueError("File must be either .xlsx, .csv, or .json format")
      else:
          df = file_path

@@ -126,15 +149,15 @@ def read_excel_to_lists(file_path, sheet_name=0):
      for column_name, column_data in df.items():
          # Convert the column values to a list and append to the data_lists
          data_lists.append(column_data.tolist())
-
      master_list = [[], [], []]
      for i in range(0, len(data_lists), 3):
-         master_list[0].extend(data_lists[i])
-         master_list[1].extend(data_lists[i+1])
+         master_list[0].extend([int(x) for x in data_lists[i]])
+         master_list[1].extend([int(x) for x in data_lists[i+1]])
          try:
-             master_list[2].extend(data_lists[i+2])
+             master_list[2].extend([int(x) for x in data_lists[i+2]])
          except IndexError:
-             pass
+             master_list[2].extend([0]) # Note: Changed to list with single int 0
+     print(master_list)

      return master_list

@@ -402,13 +425,28 @@ def read_centroids_to_dict(file_path):
      Returns:
          dict: Dictionary with first column as keys and next three columns as numpy array values
      """
+     def load_json_to_dict(filename):
+         with open(filename, 'r') as f:
+             data = json.load(f)
+
+         # Convert only numeric strings to integers, leave other strings as is
+         converted_data = {}
+         for k, v in data.items():
+             try:
+                 converted_data[int(k)] = v
+             except ValueError:
+                 converted_data[k] = v
+
+         return converted_data
      # Check file extension
      if file_path.lower().endswith('.xlsx'):
          df = pd.read_excel(file_path)
      elif file_path.lower().endswith('.csv'):
          df = pd.read_csv(file_path)
+     elif file_path.lower().endswith('.json'):
+         df = load_json_to_dict(file_path)
      else:
-         raise ValueError("Unsupported file format. Please provide either .xlsx or .csv file")
+         raise ValueError("Unsupported file format. Please provide either .xlsx, .csv, or .json file")

      # Initialize an empty dictionary
      data_dict = {}
@@ -434,13 +472,30 @@ def read_excel_to_singval_dict(file_path):
      Returns:
          dict: Dictionary with first column as keys and second column as values
      """
+     def load_json_to_dict(filename):
+         with open(filename, 'r') as f:
+             data = json.load(f)
+
+         # Convert only numeric strings to integers, leave other strings as is
+         converted_data = {}
+         for k, v in data.items():
+             try:
+                 converted_data[int(k)] = v
+             except ValueError:
+                 converted_data[k] = v
+
+         return converted_data
+
      # Check file extension and read accordingly
      if file_path.lower().endswith('.xlsx'):
          df = pd.read_excel(file_path)
      elif file_path.lower().endswith('.csv'):
          df = pd.read_csv(file_path)
+     elif file_path.lower().endswith('.json'):
+         df = load_json_to_dict(file_path)
+         return df
      else:
-         raise ValueError("Unsupported file format. Please provide either .xlsx or .csv file")
+         raise ValueError("Unsupported file format. Please provide either .xlsx, .csv, or .json file")

      # Convert the DataFrame to a dictionary
      data_dict = {}
@@ -521,34 +576,35 @@ def find_centroids(nodes, down_factor = None, network = None):

      return centroid_dict

- def _save_centroid_dictionary(centroid_dict, filepath = None, index = 'Node ID'):
+ def _save_centroid_dictionary(centroid_dict, filepath=None, index='Node ID'):
      # Convert dictionary to DataFrame with keys as index and values as a column
-     #for item in centroid_dict:
-         #representative = centroid_dict[item]
-         #break
-
-     #if len(representative) == 3:
-         #df = pd.DataFrame.from_dict(centroid_dict, orient='index', columns=['Z', 'Y', 'X'])
-     #elif len(representative) == 2:
-         #df = pd.DataFrame.from_dict(centroid_dict, orient='index', columns=['Y', 'X'])
-
      df = pd.DataFrame.from_dict(centroid_dict, orient='index', columns=['Z', 'Y', 'X'])
-
-     # Rename the index to 'Node ID'
+
+     # Rename the index to specified name
      df.index.name = index
-
+
      if filepath is None:
-         try:
-             # Save DataFrame to Excel file
-             df.to_excel('centroids.xlsx', engine='openpyxl')
-         except Exception as e:
-             print("Could not save centroids to active directory")
+         base_path = 'centroids'
      else:
+         # Remove file extension if present to use as base path
+         base_path = filepath.rsplit('.', 1)[0]
+
+     # First try to save as CSV
+     try:
+         csv_path = f"{base_path}.csv"
+         df.to_csv(csv_path)
+         print(f"Successfully saved centroids to {csv_path}")
+         return
+     except Exception as e:
+         print(f"Could not save centroids as CSV: {str(e)}")
+
+     # If CSV fails, try to save as Excel
      try:
-         # Save DataFrame to Excel file
-         df.to_excel(filepath, engine='openpyxl')
+         xlsx_path = f"{base_path}.xlsx"
+         df.to_excel(xlsx_path, engine='openpyxl')
+         print(f"Successfully saved centroids to {xlsx_path}")
      except Exception as e:
-         print(f"Could not save centroids to {filepath}")
+         print(f"Could not save centroids as XLSX: {str(e)}")

  def _find_centroids_GPU(nodes, node_list=None, down_factor=None):
      """Internal use version to get centroids without saving"""
@@ -1174,15 +1230,31 @@ def edge_to_node(network, node_identities = None):


  def save_singval_dict(dict, index_name, valname, filename):
-
-     #index name goes on the left, valname on the right
+     # Convert dictionary to DataFrame
      df = pd.DataFrame.from_dict(dict, orient='index', columns=[valname])
-
-     # Rename the index to 'Node ID'
+
+     # Rename the index
      df.index.name = index_name
-
-     # Save DataFrame to Excel file
-     df.to_excel(filename, engine='openpyxl')
+
+     # Remove file extension if present to use as base path
+     base_path = filename.rsplit('.', 1)[0]
+
+     # First try to save as CSV
+     try:
+         csv_path = f"{base_path}.csv"
+         df.to_csv(csv_path)
+         print(f"Successfully saved {valname} data to {csv_path}")
+         return
+     except Exception as e:
+         print(f"Could not save as CSV: {str(e)}")
+
+     # If CSV fails, try to save as Excel
+     try:
+         xlsx_path = f"{base_path}.xlsx"
+         df.to_excel(xlsx_path, engine='openpyxl')
+         print(f"Successfully saved {valname} data to {xlsx_path}")
+     except Exception as e:
+         print(f"Could not save as XLSX: {str(e)}")


  def rand_net_weighted(num_rows, num_nodes, nodes):
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: nettracer3d
- Version: 0.2.7
+ Version: 0.2.9
  Summary: Scripts for intializing and analyzing networks from segmentations of three dimensional images.
  Author-email: Liam McLaughlin <boom2449@gmail.com>
  Project-URL: User_Manual, https://drive.google.com/drive/folders/1fTkz3n4LN9_VxKRKC8lVQSlrz_wq0bVn?usp=drive_link