nettracer3d 0.2.5__py3-none-any.whl → 0.2.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nettracer3d/community_extractor.py +153 -0
- nettracer3d/hub_getter.py +1 -1
- nettracer3d/modularity.py +263 -19
- nettracer3d/morphology.py +144 -7
- nettracer3d/nettracer.py +542 -48
- nettracer3d/nettracer_gui.py +1685 -255
- nettracer3d/network_analysis.py +96 -26
- nettracer3d/node_draw.py +3 -1
- nettracer3d/proximity.py +66 -6
- nettracer3d/simple_network.py +82 -72
- {nettracer3d-0.2.5.dist-info → nettracer3d-0.2.7.dist-info}/METADATA +1 -1
- nettracer3d-0.2.7.dist-info/RECORD +18 -0
- nettracer3d-0.2.5.dist-info/RECORD +0 -18
- {nettracer3d-0.2.5.dist-info → nettracer3d-0.2.7.dist-info}/LICENSE +0 -0
- {nettracer3d-0.2.5.dist-info → nettracer3d-0.2.7.dist-info}/WHEEL +0 -0
- {nettracer3d-0.2.5.dist-info → nettracer3d-0.2.7.dist-info}/top_level.txt +0 -0
nettracer3d/nettracer.py
CHANGED
@@ -362,51 +362,108 @@ def establish_inner_edges(nodes, edge):
     return inner_edges


-def upsample_with_padding(data, factor, original_shape):
-    """
-
+def upsample_with_padding(data, factor=None, original_shape=None):
+    """
+    Upsample a 3D or 4D array with optional different scaling factors per dimension.
+
+    Parameters:
+    -----------
+    data : ndarray
+        Input 3D array or 4D array (where 4th dimension is RGB) to be upsampled
+    factor : float or tuple, optional
+        Upsampling factor. If float, same factor is applied to all dimensions.
+        If tuple, should contain three values for z, y, x dimensions respectively.
+        If None, factor is calculated from original_shape.
+    original_shape : tuple, optional
+        Target shape for the output array. Used to calculate factors if factor is None.
+
+    Returns:
+    --------
+    ndarray
+        Upsampled and padded array matching the original shape
+    """
+    if original_shape is None:
+        raise ValueError("original_shape must be provided")
+
+    # Handle 4D color arrays
+    is_color = len(data.shape) == 4 and data.shape[-1] == 3
+    if is_color:
+        # Split into separate color channels
+        channels = [data[..., i] for i in range(3)]
+        upsampled_channels = []
+
+        for channel in channels:
+            # Upsample each channel separately
+            upsampled_channel = _upsample_3d_array(channel, factor, original_shape)
+            upsampled_channels.append(upsampled_channel)
+
+        # Stack the channels back together
+        return np.stack(upsampled_channels, axis=-1)
+    else:
+        # Handle regular 3D array
+        return _upsample_3d_array(data, factor, original_shape)

-
+def _upsample_3d_array(data, factor, original_shape):
+    """Helper function to handle the upsampling of a single 3D array"""
     original_shape = np.array(original_shape)
+    current_shape = np.array(data.shape)
+
+    # Calculate factors if not provided
+    if factor is None:
+        # Compute the ratio between original and current shape for each dimension
+        factors = [os / cs for os, cs in zip(original_shape, current_shape)]
+        # If all factors are the same, use a single number for efficiency
+        if len(set(factors)) == 1:
+            factor = factors[0]
+        else:
+            factor = tuple(factors)
+    elif isinstance(factor, (int, float)):
+        factor = factor  # Keep it as a single number
+
+    # Upsample the input array
     binary_array = zoom(data, factor, order=0)
     upsampled_shape = np.array(binary_array.shape)
-
+
     # Calculate the positive differences in dimensions
     difference_dims = original_shape - upsampled_shape
-
+
     # Calculate the padding amounts for each dimension
     padding_dims = np.maximum(difference_dims, 0)
     padding_before = padding_dims // 2
     padding_after = padding_dims - padding_before
-
+
     # Pad the binary array along each dimension
-    padded_array = np.pad(binary_array,
-
-
-
+    padded_array = np.pad(binary_array,
+                          [(padding_before[0], padding_after[0]),
+                           (padding_before[1], padding_after[1]),
+                           (padding_before[2], padding_after[2])],
+                          mode='constant',
+                          constant_values=0)
+
     # Calculate the subtraction amounts for each dimension
     sub_dims = np.maximum(-difference_dims, 0)
     sub_before = sub_dims // 2
     sub_after = sub_dims - sub_before
-
+
+    # Remove excess dimensions sequentially
     # Remove planes from the beginning and end
     if sub_dims[0] == 0:
         trimmed_planes = padded_array
     else:
         trimmed_planes = padded_array[sub_before[0]:-sub_after[0], :, :]
-
+
     # Remove rows from the beginning and end
     if sub_dims[1] == 0:
         trimmed_rows = trimmed_planes
     else:
         trimmed_rows = trimmed_planes[:, sub_before[1]:-sub_after[1], :]
-
+
     # Remove columns from the beginning and end
     if sub_dims[2] == 0:
         trimmed_array = trimmed_rows
     else:
         trimmed_array = trimmed_rows[:, :, sub_before[2]:-sub_after[2]]
-
+
     return trimmed_array


 def remove_branches(skeleton, length):
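Note: upsample_with_padding now infers per-axis zoom factors when factor is omitted and handles 4D RGB stacks. A minimal usage sketch (the import path and array shapes are illustrative assumptions, not taken from the package):

    import numpy as np
    from nettracer3d import nettracer  # assumed import path for this module

    small = np.random.randint(0, 4, size=(10, 50, 50), dtype=np.uint8)  # downsampled labelled volume (Z, Y, X)
    target_shape = (21, 101, 99)                                        # shape of the original full-resolution volume

    # factor omitted: per-axis factors are derived from original_shape, and the
    # zoomed result is then padded/trimmed to match that shape exactly
    restored = nettracer.upsample_with_padding(small, original_shape=target_shape)
    print(restored.shape)  # (21, 101, 99)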
@@ -556,6 +613,104 @@ def threshold(arr, proportion, custom_rad = None):

     return arr

+def z_project(array3d, method='max'):
+    """
+    Project a 3D numpy array along the Z axis to create a 2D array.
+
+    Parameters:
+    array3d (numpy.ndarray): 3D input array with shape (Z, Y, X)
+    method (str): Projection method - 'max', 'mean', 'min', 'sum', or 'std'
+
+    Returns:
+    numpy.ndarray: 2D projected array with shape (Y, X)
+    """
+    if not isinstance(array3d, np.ndarray) or array3d.ndim != 3:
+        raise ValueError("Input must be a 3D numpy array")
+
+    if method == 'max':
+        return np.max(array3d, axis=0)
+    elif method == 'mean':
+        return np.mean(array3d, axis=0)
+    elif method == 'min':
+        return np.min(array3d, axis=0)
+    elif method == 'sum':
+        return np.sum(array3d, axis=0)
+    elif method == 'std':
+        return np.std(array3d, axis=0)
+    else:
+        raise ValueError("Method must be one of: 'max', 'mean', 'min', 'sum', 'std'")
+
+def fill_holes_3d(array):
+
+    def process_slice(slice_2d, border_threshold=0.08):
+        """
+        Process a 2D slice, considering components that touch less than border_threshold
+        of any border length as potential holes.
+
+        Args:
+            slice_2d: 2D binary array
+            border_threshold: proportion of border that must be touched to be considered background
+        """
+        slice_2d = slice_2d.astype(np.uint8)
+        labels, num_features = ndimage.label(slice_2d)
+
+        if num_features == 0:
+            return np.zeros_like(slice_2d)
+
+        # Get dimensions for threshold calculations
+        height, width = slice_2d.shape
+
+        # Dictionary to store border intersection lengths for each label
+        border_proportions = {}
+
+        for label in range(1, num_features + 1):
+            mask = labels == label
+
+            # Calculate proportion of each border this component touches
+            top_prop = np.sum(mask[0, :]) / width
+            bottom_prop = np.sum(mask[-1, :]) / width
+            left_prop = np.sum(mask[:, 0]) / height
+            right_prop = np.sum(mask[:, -1]) / height
+
+            # If it touches any border significantly, consider it background
+            border_proportions[label] = max(top_prop, bottom_prop, left_prop, right_prop)
+
+        # Create mask of components that either don't touch borders
+        # or touch less than the threshold proportion
+        background_labels = {label for label, prop in border_proportions.items()
+                             if prop > border_threshold}
+
+        holes_mask = ~np.isin(labels, list(background_labels))
+
+        return holes_mask
+
+    array = binarize(array)
+    inv_array = invert_array(array)
+
+    # Create arrays for all three planes
+    array_xy = np.zeros_like(inv_array, dtype=np.uint8)
+    array_xz = np.zeros_like(inv_array, dtype=np.uint8)
+    array_yz = np.zeros_like(inv_array, dtype=np.uint8)
+
+    # Process XY plane
+    for z in range(inv_array.shape[0]):
+        array_xy[z] = process_slice(inv_array[z])
+
+    # Process XZ plane
+    for y in range(inv_array.shape[1]):
+        slice_xz = inv_array[:, y, :]
+        array_xz[:, y, :] = process_slice(slice_xz)
+
+    # Process YZ plane
+    for x in range(inv_array.shape[2]):
+        slice_yz = inv_array[:, :, x]
+        array_yz[:, :, x] = process_slice(slice_yz)
+
+    # Combine results from all three planes
+    filled = (array_xy | array_xz | array_yz) * 255
+    return array + filled
+
+



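Note: z_project is a thin wrapper over axis-0 reductions. For reference, the equivalent plain numpy calls (standalone sketch, not package code):

    import numpy as np

    vol = np.random.rand(5, 4, 6)   # (Z, Y, X)
    mip = np.max(vol, axis=0)       # what z_project(vol, 'max') computes
    avg = np.mean(vol, axis=0)      # z_project(vol, 'mean')
    print(mip.shape, avg.shape)     # (4, 6) (4, 6)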
@@ -813,6 +968,205 @@ def dilate_3D(tiff_array, dilated_x, dilated_y, dilated_z):

     return final_result

+def dilate_3D_recursive(tiff_array, dilated_x, dilated_y, dilated_z, step_size=None):
+    """Recursive 3D dilation method that handles odd-numbered dilations properly.
+
+    Args:
+        tiff_array: Input 3D array
+        dilated_x, dilated_y, dilated_z: Odd numbers representing total dilation size
+        step_size: Size of dilation step for this iteration
+
+    Each dilation parameter represents (n-1)/2 steps outward from the object.
+    """
+    # Calculate the smallest dimension of the array
+    min_dim = min(tiff_array.shape)
+
+    # For small dilations relative to array size, don't use recursion
+    max_dilation = max(dilated_x, dilated_y, dilated_z)
+    if max_dilation < (0.2 * min_dim):
+        return dilate_3D_recursive(tiff_array, dilated_x, dilated_y, dilated_z, step_size=1)
+
+    # Initialize step_size for first call
+    if step_size is None:
+        # Start with a reasonable step size based on the largest dilation
+        step_size = min(5, max((max_dilation - 1) // 2 // 3, 1))
+
+    # Base case: if step_size is 1 or we've achieved full dilation
+    if step_size == 1 or (dilated_x <= 1 and dilated_y <= 1 and dilated_z <= 1):
+        def create_circular_kernel(diameter):
+            radius = diameter/2
+            size = radius
+            size = int(np.ceil(size))
+            y, x = np.ogrid[-radius:radius+1, -radius:radius+1]
+            distance = np.sqrt(x**2 + y**2)
+            kernel = distance <= radius
+            return kernel.astype(np.uint8)
+
+        def create_ellipsoidal_kernel(long_axis, short_axis):
+            semi_major, semi_minor = long_axis / 2, short_axis / 2
+            size_y = int(np.ceil(semi_minor))
+            size_x = int(np.ceil(semi_major))
+            y, x = np.ogrid[-semi_minor:semi_minor+1, -semi_major:semi_major+1]
+            ellipse = (x**2 / semi_major**2) + (y**2 / semi_minor**2) <= 1
+            return ellipse.astype(np.uint8)
+
+        def process_slice(z):
+            tiff_slice = tiff_array[z].astype(np.uint8)
+            dilated_slice = cv2.dilate(tiff_slice, kernel, iterations=1)
+            return z, dilated_slice
+
+        def process_slice_other(y):
+            tiff_slice = tiff_array[:, y, :].astype(np.uint8)
+            dilated_slice = cv2.dilate(tiff_slice, kernel, iterations=1)
+            return y, dilated_slice
+
+        # Create empty arrays for the dilated results
+        dilated_xy = np.zeros_like(tiff_array, dtype=np.uint8)
+        dilated_xz = np.zeros_like(tiff_array, dtype=np.uint8)
+
+        # Create kernels for final dilation
+        kernel = create_circular_kernel(dilated_x)
+
+        # Process XY plane
+        num_cores = mp.cpu_count()
+        with ThreadPoolExecutor(max_workers=num_cores) as executor:
+            futures = {executor.submit(process_slice, z): z for z in range(tiff_array.shape[0])}
+            for future in as_completed(futures):
+                z, dilated_slice = future.result()
+                dilated_xy[z] = dilated_slice
+
+        # Process XZ plane
+        kernel = create_ellipsoidal_kernel(dilated_x, dilated_z)
+        with ThreadPoolExecutor(max_workers=num_cores) as executor:
+            futures = {executor.submit(process_slice_other, y): y for y in range(tiff_array.shape[1])}
+            for future in as_completed(futures):
+                y, dilated_slice = future.result()
+                dilated_xz[:, y, :] = dilated_slice
+
+        return dilated_xy | dilated_xz
+
+    # Calculate current iteration's dilation sizes (must be odd numbers)
+    current_x_steps = min((dilated_x - 1) // 2, step_size)
+    current_y_steps = min((dilated_y - 1) // 2, step_size)
+    current_z_steps = min((dilated_z - 1) // 2, step_size)
+
+    current_x_dilation = current_x_steps * 2 + 1
+    current_y_dilation = current_y_steps * 2 + 1
+    current_z_dilation = current_z_steps * 2 + 1
+
+    # Perform current iteration's dilation
+    current_result = dilate_3D_recursive(tiff_array, current_x_dilation, current_y_dilation, current_z_dilation, step_size=1)
+
+    # Calculate remaining dilation needed
+    # For X and Y, use the circle radius (current_x_steps)
+    # For Z, use the ellipse short axis (current_z_steps)
+    remaining_x = max(1, dilated_x - (current_x_steps * 2))
+    remaining_y = max(1, dilated_y - (current_y_steps * 2))
+    remaining_z = max(1, dilated_z - (current_z_steps * 2))
+
+    # If no more dilation needed, return current result
+    if remaining_x == 1 and remaining_y == 1 and remaining_z == 1:
+        return current_result
+
+    # Recursive call with remaining dilation and decreased step size
+    return dilate_3D_recursive(current_result, remaining_x, remaining_y, remaining_z, step_size=max(1, step_size - 1))
+
+def erode_3D(tiff_array, eroded_x, eroded_y, eroded_z):
+    """Internal method to erode an array in 3D. Erosion this way is much faster than using a distance transform although the latter is theoretically more accurate.
+    Arguments are an array, and the desired pixel erosion amounts in X, Y, Z."""
+    def create_circular_kernel(diameter):
+        """Create a 2D circular kernel with a given radius.
+        Parameters:
+        radius (int or float): The radius of the circle.
+        Returns:
+        numpy.ndarray: A 2D numpy array representing the circular kernel.
+        """
+        # Determine the size of the kernel
+        radius = diameter/2
+        size = radius  # Diameter of the circle
+        size = int(np.ceil(size))  # Ensure size is an integer
+
+        # Create a grid of (x, y) coordinates
+        y, x = np.ogrid[-radius:radius+1, -radius:radius+1]
+
+        # Calculate the distance from the center (0,0)
+        distance = np.sqrt(x**2 + y**2)
+
+        # Create the circular kernel: points within the radius are 1, others are 0
+        kernel = distance <= radius
+
+        # Convert the boolean array to integer (0 and 1)
+        return kernel.astype(np.uint8)
+
+    def create_ellipsoidal_kernel(long_axis, short_axis):
+        """Create a 2D ellipsoidal kernel with specified axis lengths and orientation.
+        Parameters:
+        long_axis (int or float): The length of the long axis.
+        short_axis (int or float): The length of the short axis.
+        Returns:
+        numpy.ndarray: A 2D numpy array representing the ellipsoidal kernel.
+        """
+        semi_major, semi_minor = long_axis / 2, short_axis / 2
+        # Determine the size of the kernel
+        size_y = int(np.ceil(semi_minor))
+        size_x = int(np.ceil(semi_major))
+
+        # Create a grid of (x, y) coordinates centered at (0,0)
+        y, x = np.ogrid[-semi_minor:semi_minor+1, -semi_major:semi_major+1]
+
+        # Ellipsoid equation: (x/a)^2 + (y/b)^2 <= 1
+        ellipse = (x**2 / semi_major**2) + (y**2 / semi_minor**2) <= 1
+
+        return ellipse.astype(np.uint8)
+
+    z_depth = tiff_array.shape[0]
+
+    # Function to process each slice
+    def process_slice(z):
+        tiff_slice = tiff_array[z].astype(np.uint8)
+        eroded_slice = cv2.erode(tiff_slice, kernel, iterations=1)
+        return z, eroded_slice
+
+    def process_slice_other(y):
+        tiff_slice = tiff_array[:, y, :].astype(np.uint8)
+        eroded_slice = cv2.erode(tiff_slice, kernel, iterations=1)
+        return y, eroded_slice
+
+    # Create empty arrays to store the eroded results for the XY and XZ planes
+    eroded_xy = np.zeros_like(tiff_array, dtype=np.uint8)
+    eroded_xz = np.zeros_like(tiff_array, dtype=np.uint8)
+
+    kernel_x = int(eroded_x)
+    kernel = create_circular_kernel(kernel_x)
+
+    num_cores = mp.cpu_count()
+    with ThreadPoolExecutor(max_workers=num_cores) as executor:
+        futures = {executor.submit(process_slice, z): z for z in range(tiff_array.shape[0])}
+        for future in as_completed(futures):
+            z, eroded_slice = future.result()
+            eroded_xy[z] = eroded_slice
+
+    kernel_x = int(eroded_x)
+    kernel_z = int(eroded_z)
+    kernel = create_ellipsoidal_kernel(kernel_x, kernel_z)
+
+    if z_depth != 2:
+
+        with ThreadPoolExecutor(max_workers=num_cores) as executor:
+            futures = {executor.submit(process_slice_other, y): y for y in range(tiff_array.shape[1])}
+
+            for future in as_completed(futures):
+                y, eroded_slice = future.result()
+                eroded_xz[:, y, :] = eroded_slice
+
+    # Overlay the results using AND operation instead of OR for erosion
+    if z_depth != 2:
+        final_result = eroded_xy & eroded_xz
+    else:
+        return eroded_xy
+
+    return final_result
+

 def dilate_3D_old(tiff_array, dilated_x, dilated_y, dilated_z):
     """(For cubey dilation only). Internal method to dilate an array in 3D.
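Note: dilate_3D_recursive splits one large odd dilation into a chain of smaller odd dilations, applying one kernel per round and recursing on the remainder with a smaller step size. A bookkeeping-only sketch of that decomposition along a single axis (it assumes the small-dilation shortcut at the top of the function is not taken; this is not package code):

    def recursion_trace(d, step_size=None):
        """Odd kernel diameters dilate_3D_recursive would apply along one axis."""
        if step_size is None:
            step_size = min(5, max((d - 1) // 2 // 3, 1))
        kernels = []
        while True:
            if step_size == 1 or d <= 1:
                kernels.append(d)          # base case: whole remainder applied in one pass
                return kernels
            steps = min((d - 1) // 2, step_size)
            kernels.append(steps * 2 + 1)  # this round's odd kernel diameter
            d = max(1, d - steps * 2)      # remaining dilation to perform
            if d == 1:
                return kernels
            step_size = max(1, step_size - 1)

    print(recursion_trace(31))  # [11, 9, 7, 5, 3]: 5+4+3+2+1 = (31-1)//2 dilation steps in total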
@@ -947,6 +1301,10 @@ def combine_nodes(root_nodes, other_nodes, other_ID, identity_dict, root_ID = No
         identity_dict[item] = root_ID

     for item in otherIDs: #Always adds the other vals to the dictionary
+        try:
+            other_ID = os.path.basename(other_ID)
+        except:
+            pass
         identity_dict[item] = other_ID

     nodes = root_nodes + other_nodes #Combine the outer edges with the inner edges modified via the above steps
@@ -1070,6 +1428,23 @@ def dilate(arrayimage, amount, xy_scale = 1, z_scale = 1, directory = None, fast

     return arrayimage

+def erode(arrayimage, amount, xy_scale = 1, z_scale = 1):
+    if len(np.unique(arrayimage)) > 2: #binarize
+        arrayimage = binarize(arrayimage)
+    erode_xy, erode_z = dilation_length_to_pixels(xy_scale, z_scale, amount, amount)
+
+    if len(np.unique(arrayimage)) > 2: #binarize
+        arrayimage = binarize(arrayimage)
+
+    arrayimage = (erode_3D(arrayimage, erode_xy, erode_xy, erode_z)) * 255
+    if np.max(arrayimage) == 1:
+        arrayimage = arrayimage * 255
+
+    return arrayimage
+
+
+
+

 def skeletonize(arrayimage, directory = None):
     """
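Note: the new erode wrapper mirrors dilate: it binarizes if needed, converts the physical distance to pixel radii via dilation_length_to_pixels, and erodes slice-wise with erode_3D. A calling sketch (import path assumed):

    import numpy as np
    from nettracer3d import nettracer  # assumed import path

    mask = np.zeros((20, 100, 100), dtype=np.uint8)
    mask[5:15, 30:70, 30:70] = 255

    # Erode by 3 physical units with anisotropic voxels (xy spacing 1, z spacing 2)
    shrunk = nettracer.erode(mask, 3, xy_scale=1, z_scale=2)
    print(shrunk.shape)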
@@ -1617,7 +1992,8 @@ class Network_3D:
         if array is not None and not isinstance(array, np.ndarray):
             raise ValueError("nodes must be a (preferably labelled) numpy array.")
         if array is not None and len(array.shape) == 2: #For dealing with 2D images
-            array = np.stack((array, array), axis = 0)
+            #array = np.stack((array, array), axis = 0)
+            array = np.expand_dims(array, axis=0)
         self._nodes = array

     @nodes.deleter
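Note: the 2D handling in these setters now wraps a single plane as a one-slice volume instead of duplicating it. A standalone illustration of the difference:

    import numpy as np

    plane = np.ones((4, 5), dtype=np.uint8)
    old_style = np.stack((plane, plane), axis=0)  # previous behavior: shape (2, 4, 5), plane duplicated
    new_style = np.expand_dims(plane, axis=0)     # new behavior: shape (1, 4, 5), single slice
    print(old_style.shape, new_style.shape)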
@@ -1735,7 +2111,8 @@ class Network_3D:
         if array is not None and not isinstance(array, np.ndarray):
             raise ValueError("edges must be a (preferably labelled) numpy array.")
         if array is not None and len(array.shape) == 2: #For dealing with 2D images
-            array = np.stack((array, array), axis = 0)
+            #array = np.stack((array, array), axis = 0)
+            array = np.expand_dims(array, axis=0)
         self._edges = array

     @edges.deleter
@@ -1756,6 +2133,9 @@ class Network_3D:
         """Sets the search_region property"""
         if array is not None and not isinstance(array, np.ndarray):
             raise ValueError("search_region must be a (preferably labelled) numpy array.")
+        if array is not None and len(array.shape) == 2: #For dealing with 2D images
+            #array = np.stack((array, array), axis = 0)
+            array = np.expand_dims(array, axis=0)
         self._search_region = array

     @search_region.deleter
@@ -1836,6 +2216,9 @@ class Network_3D:
         """Sets the nodes property"""
         if array is not None and not isinstance(array, np.ndarray):
             raise ValueError("network overlay must be a (preferably labelled) numpy array.")
+        if array is not None and len(array.shape) == 2: #For dealing with 2D images
+            #array = np.stack((array, array), axis = 0)
+            array = np.expand_dims(array, axis=0)

         self._network_overlay = array

@@ -1852,6 +2235,9 @@ class Network_3D:
         """Sets the nodes property"""
         if array is not None and not isinstance(array, np.ndarray):
             raise ValueError("id overlay must be a (preferably labelled) numpy array.")
+        if array is not None and len(array.shape) == 2: #For dealing with 2D images
+            #array = np.stack((array, array), axis = 0)
+            array = np.expand_dims(array, axis=0)

         self._id_overlay = array

@@ -2868,12 +3254,14 @@ class Network_3D:

     #Some methods that may be useful:

-    def community_partition(self, weighted = False, style = 0):
+    def community_partition(self, weighted = False, style = 0, dostats = True):
         """
         Sets the communities attribute by splitting the network into communities
         """

-        self._communities, self.normalized_weights = modularity.community_partition(self._network_lists, weighted = weighted, style = style)
+        self._communities, self.normalized_weights, stats = modularity.community_partition(self._network_lists, weighted = weighted, style = style, dostats = dostats)
+
+        return stats

     def remove_edge_weights(self):
         """
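Note: community_partition now forwards a dostats flag to modularity.community_partition and returns whatever statistics that call produces. A minimal calling sketch (import path assumed; the network is assumed to have been built by earlier steps):

    from nettracer3d import nettracer  # assumed import path

    net = nettracer.Network_3D()
    # ... net's network_lists assumed to be populated beforehand ...
    stats = net.community_partition(weighted=False, style=0, dostats=True)  # also sets the communities attribute
    print(stats)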
@@ -2966,6 +3354,19 @@ class Network_3D:
         elif num_edge < 65536:
             self._edges = self._edges.astype(np.uint16)

+        node_bools = self._nodes == 0
+
+        self._nodes = self._nodes.astype(np.uint32)
+        self._edges = self._edges * node_bools
+        self._nodes = self._nodes + self._edges
+        num_node = np.max(self._nodes)
+
+        if num_node < 256:
+            self._nodes = self._nodes.astype(np.uint8)
+        elif num_node < 65536:
+            self._nodes = self._nodes.astype(np.uint16)
+
+
     def trunk_to_node(self):
         """
         Converts the edge 'trunk' into a node. In this case, the trunk is the edge that creates the most node-node connections. There may be times when many nodes are connected by a single, expansive edge that obfuscates the rest of the edges. Converting the trunk to a node can better reveal these edges.
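Note: the added block folds edge labels into the node image wherever no node label is already present, then re-fits the node dtype. A standalone numpy illustration of that masked merge:

    import numpy as np

    nodes = np.array([[1, 0, 0],
                      [0, 2, 0]], dtype=np.uint32)
    edges = np.array([[7, 7, 0],
                      [7, 0, 5]], dtype=np.uint32)

    node_bools = nodes == 0              # True where no node label exists
    merged = nodes + edges * node_bools  # edge labels fill only the empty voxels
    print(merged)
    # [[1 7 0]
    #  [7 2 5]]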
@@ -2989,16 +3390,14 @@ class Network_3D:
                 nodeb.append(addtrunk)
                 nodea.append(nodesb[i])
                 nodeb.append(addtrunk)
-                edgec.append(
-                edgec.append(
+                edgec.append(0)
+                edgec.append(0)
             else:
                 nodea.append(nodesa[i])
                 nodeb.append(nodesb[i])
                 edgec.append(edgesc[i])

-        self.
-
-        self.network, _ = network_analysis.weighted_network(self._network_lists)
+        self.network_lists = [nodea, nodeb, edgec]

         self._node_centroids[addtrunk] = self._edge_centroids[trunk]

@@ -3013,6 +3412,19 @@ class Network_3D:
         else:
             self._node_identities[addtrunk] = "Trunk"

+        if self._edges is not None and self._nodes is not None:
+
+            node_bools = self._nodes == 0
+
+            trunk = self._edges == trunk
+
+            trunk = trunk * addtrunk
+
+            trunk = trunk * node_bools
+
+            self._nodes = self._nodes + trunk
+
+



@@ -3480,28 +3892,43 @@ class Network_3D:
         mothers, overlay = community_extractor.extract_mothers(self._nodes, self._network, centroid_dic = self._node_centroids, directory = directory, louvain = louvain, called = called)
         return mothers, overlay

-    def extract_communities(self, directory = None, down_factor = 1, color_code = True):
-        """
-        Method to generate overlays that relate community detection in a network to the 3D structure.
-        Overlays include a grayscale image where nodes are assigned a grayscale value corresponding to their community, a numerical index where numbers are drawn at nodes corresponding to their community, and a
-        color coded overlay where a nodes color corresponds to its community. Community detection will be done with label propogation.
-        These will be saved to the active directory if none is specified.
-        :param directory: (Optional - Val = None; string). A path to a directory to save outputs.
-        :param down_factor: (Optional - Val = 1; int). A factor to downsample nodes by while drawing overlays. Note this option REQUIRES node_centroids to already be set.
-        :param color code: (Optional - Val = True; boolean). If set to False, the color-coded overlay will not be drawn.
-        :returns: A dictionary where nodes are grouped by community.
-        """
-        if down_factor > 1:
-            centroids = self._node_centroids.copy()
-            for item in self._node_centroids:
-                centroids[item] = np.round((self._node_centroids[item]) / down_factor)
-            nodes = downsample(self._nodes, down_factor)
-            partition = network_analysis.community_partition_simple(nodes, self._network, directory = directory, centroids = centroids, color_code = color_code)

+    def isolate_hubs(self, proportion = 0.1, retimg = True):
+
+        hubs = community_extractor.find_hub_nodes(self._network, proportion)
+
+        if retimg:
+
+            hub_img = np.isin(self._nodes, hubs) * self._nodes
         else:
-
+            hub_iimg = None
+
+        return hubs, hub_img
+
+
+    def extract_communities(self, color_code = True, down_factor = None):
+
+        if down_factor is not None:
+            original_shape = self._nodes.shape
+            temp = downsample(self._nodes, down_factor)
+            if color_code:
+                image = community_extractor.assign_community_colors(self.communities, temp)
+            else:
+                image = community_extractor.assign_community_grays(self.communities, temp)
+            image = upsample_with_padding(image, down_factor, original_shape)
+        else:
+
+            if color_code:
+                image = community_extractor.assign_community_colors(self.communities, self._nodes)
+            else:
+                image = community_extractor.assign_community_grays(self.communities, self._nodes)
+
+
+        return image
+
+
+

-        return partition

     def extract_communities_louvain(self, directory = None, down_factor = 1, color_code = True):
         """
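Note: isolate_hubs and the rewritten extract_communities now return their results in memory rather than writing overlays to a directory. A calling sketch (import path assumed; nodes, network and communities assumed to be populated beforehand):

    from nettracer3d import nettracer  # assumed import path

    net = nettracer.Network_3D()
    # ... net's nodes, network and communities assumed populated beforehand ...
    hubs, hub_img = net.isolate_hubs(proportion=0.1, retimg=True)    # hub node labels plus an image of just those nodes
    overlay = net.extract_communities(color_code=True)               # community-colored overlay of the node image
    small = net.extract_communities(color_code=True, down_factor=2)  # same, computed on a downsampled copy then re-padded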
@@ -3561,9 +3988,9 @@ class Network_3D:
         :returns: an equivalent random networkx graph object
         """

-        G = network_analysis.generate_random(self._network, self._network_lists, weighted = weighted)
+        G, df = network_analysis.generate_random(self._network, self._network_lists, weighted = weighted)

-        return G
+        return G, df

     def degree_distribution(self, directory = None):
         """
@@ -3649,8 +4076,65 @@ class Network_3D:
         return stats


+    def neighborhood_identities(self, root, directory = None, mode = 0, search = 0):
+

-
+
+        targets = []
+        total_dict = {}
+        neighborhood_dict = {}
+        proportion_dict = {}
+        G = self._network
+        node_identities = self._node_identities
+        for val in set(node_identities.values()):
+            total_dict[val] = 0
+            neighborhood_dict[val] = 0
+
+        for node in node_identities:
+            nodeid = node_identities[node]
+            total_dict[nodeid] += 1
+            if nodeid == root:
+                targets.append(node)
+
+
+        if mode == 0: #search neighbor ids within the network
+
+
+            for node in G.nodes():
+                nodeid = node_identities[node]
+                neighbors = list(G.neighbors(node))
+                for subnode in neighbors:
+                    subnodeid = node_identities[subnode]
+                    if subnodeid == root:
+                        neighborhood_dict[nodeid] += 1
+                        break
+
+            title1 = f'Neighborhood Distribution of Nodes in Network from Nodes: {root}'
+            title2 = f'Neighborhood Distribution of Nodes in Network from Nodes {root} as a proportion of total nodes of that ID'
+
+
+        elif mode == 1: #Search neighborhoods morphologically, obtain densities
+            neighborhood_dict, total_dict = morphology.search_neighbor_ids(self._nodes, targets, node_identities, neighborhood_dict, total_dict, search, self._xy_scale, self._z_scale)
+            title1 = f'Volumetric Neighborhood Distribution of Nodes in image from Nodes: {root}'
+            title2 = f'Density Distribution of Nodes in image from Nodes {root} as a proportion of total node volume of that ID'
+
+
+        for identity in neighborhood_dict:
+            proportion_dict[identity] = neighborhood_dict[identity]/total_dict[identity]
+
+        network_analysis.create_bar_graph(neighborhood_dict, title1, "Node Identity", "Amount", directory=directory)
+
+        network_analysis.create_bar_graph(proportion_dict, title2, "Node Identity", "Proportion", directory=directory)
+
+
+
+
+
+        return neighborhood_dict, proportion_dict, title1, title2
+
+
+
+    #Morphological stats or network linking:

     def volumes(self, sort = 'nodes'):

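Note: neighborhood_identities tallies, per node identity, how many nodes border the root identity in the graph (mode 0) or how much labelled volume falls within a morphological search of it (mode 1). A calling sketch (import path and the "TypeA" identity label are illustrative assumptions):

    from nettracer3d import nettracer  # assumed import path

    net = nettracer.Network_3D()
    # ... net's network and node_identities assumed populated beforehand ...
    counts, proportions, title1, title2 = net.neighborhood_identities("TypeA", mode=0)
    print(counts)       # per-identity count of nodes with at least one "TypeA" neighbor
    print(proportions)  # the same counts divided by the total node count of each identity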
@@ -3664,6 +4148,16 @@ class Network_3D:

             return morphology.calculate_voxel_volumes(self._edges, self._xy_scale, self._z_scale)

+        elif sort == 'network_overlay':
+
+            return morphology.calculate_voxel_volumes(self._network_overlay, self._xy_scale, self._z_scale)
+
+        elif sort == 'id_overlay':
+
+            return morphology.calculate_voxel_volumes(self._id_overlay, self._xy_scale, self._z_scale)
+
+
+

     def interactions(self, search = 0, cores = 0, resize = None, save = False, skele = False):

@@ -3671,13 +4165,13 @@ class Network_3D:



-    def morph_proximity(self, search = 0):
+    def morph_proximity(self, search = 0, targets = None):

         search_x, search_z = dilation_length_to_pixels(self._xy_scale, self._z_scale, search, search)

         num_nodes = np.max(self._nodes)

-        my_dict = proximity.create_node_dictionary(self._nodes, num_nodes, search_x, search_z)
+        my_dict = proximity.create_node_dictionary(self._nodes, num_nodes, search_x, search_z, targets = targets)

         my_dict = proximity.find_shared_value_pairs(my_dict)

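Note: morph_proximity gains a targets parameter that is passed straight through to proximity.create_node_dictionary, presumably so the proximity search can be limited to a subset of node labels. A calling sketch (import path and the example label list are assumptions):

    from nettracer3d import nettracer  # assumed import path

    net = nettracer.Network_3D()
    # ... net's labelled nodes and voxel scalings assumed populated beforehand ...
    net.morph_proximity(search=5, targets=[1, 2, 3])  # targets assumed to restrict the search to these labels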