nettracer3d 0.2.6__py3-none-any.whl → 0.2.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nettracer3d/community_extractor.py +237 -0
- nettracer3d/hub_getter.py +1 -1
- nettracer3d/modularity.py +263 -19
- nettracer3d/morphology.py +149 -7
- nettracer3d/nettracer.py +573 -52
- nettracer3d/nettracer_gui.py +1850 -210
- nettracer3d/network_analysis.py +155 -30
- nettracer3d/node_draw.py +3 -1
- nettracer3d/proximity.py +66 -6
- nettracer3d/simple_network.py +82 -72
- {nettracer3d-0.2.6.dist-info → nettracer3d-0.2.8.dist-info}/METADATA +1 -1
- nettracer3d-0.2.8.dist-info/RECORD +18 -0
- nettracer3d-0.2.6.dist-info/RECORD +0 -18
- {nettracer3d-0.2.6.dist-info → nettracer3d-0.2.8.dist-info}/LICENSE +0 -0
- {nettracer3d-0.2.6.dist-info → nettracer3d-0.2.8.dist-info}/WHEEL +0 -0
- {nettracer3d-0.2.6.dist-info → nettracer3d-0.2.8.dist-info}/top_level.txt +0 -0
nettracer3d/nettracer.py
CHANGED
@@ -362,51 +362,108 @@ def establish_inner_edges(nodes, edge):
     return inner_edges


-def upsample_with_padding(data, factor, original_shape):
-    """
-
+def upsample_with_padding(data, factor=None, original_shape=None):
+    """
+    Upsample a 3D or 4D array with optional different scaling factors per dimension.
+
+    Parameters:
+    -----------
+    data : ndarray
+        Input 3D array or 4D array (where 4th dimension is RGB) to be upsampled
+    factor : float or tuple, optional
+        Upsampling factor. If float, same factor is applied to all dimensions.
+        If tuple, should contain three values for z, y, x dimensions respectively.
+        If None, factor is calculated from original_shape.
+    original_shape : tuple, optional
+        Target shape for the output array. Used to calculate factors if factor is None.
+
+    Returns:
+    --------
+    ndarray
+        Upsampled and padded array matching the original shape
+    """
+    if original_shape is None:
+        raise ValueError("original_shape must be provided")
+
+    # Handle 4D color arrays
+    is_color = len(data.shape) == 4 and data.shape[-1] == 3
+    if is_color:
+        # Split into separate color channels
+        channels = [data[..., i] for i in range(3)]
+        upsampled_channels = []
+
+        for channel in channels:
+            # Upsample each channel separately
+            upsampled_channel = _upsample_3d_array(channel, factor, original_shape)
+            upsampled_channels.append(upsampled_channel)
+
+        # Stack the channels back together
+        return np.stack(upsampled_channels, axis=-1)
+    else:
+        # Handle regular 3D array
+        return _upsample_3d_array(data, factor, original_shape)

-
+def _upsample_3d_array(data, factor, original_shape):
+    """Helper function to handle the upsampling of a single 3D array"""
     original_shape = np.array(original_shape)
+    current_shape = np.array(data.shape)
+
+    # Calculate factors if not provided
+    if factor is None:
+        # Compute the ratio between original and current shape for each dimension
+        factors = [os / cs for os, cs in zip(original_shape, current_shape)]
+        # If all factors are the same, use a single number for efficiency
+        if len(set(factors)) == 1:
+            factor = factors[0]
+        else:
+            factor = tuple(factors)
+    elif isinstance(factor, (int, float)):
+        factor = factor  # Keep it as a single number
+
+    # Upsample the input array
     binary_array = zoom(data, factor, order=0)
     upsampled_shape = np.array(binary_array.shape)
-
+
     # Calculate the positive differences in dimensions
     difference_dims = original_shape - upsampled_shape
-
+
     # Calculate the padding amounts for each dimension
     padding_dims = np.maximum(difference_dims, 0)
     padding_before = padding_dims // 2
     padding_after = padding_dims - padding_before
-
+
     # Pad the binary array along each dimension
-    padded_array = np.pad(binary_array,
-
-
-
+    padded_array = np.pad(binary_array,
+                          [(padding_before[0], padding_after[0]),
+                           (padding_before[1], padding_after[1]),
+                           (padding_before[2], padding_after[2])],
+                          mode='constant',
+                          constant_values=0)
+
     # Calculate the subtraction amounts for each dimension
     sub_dims = np.maximum(-difference_dims, 0)
     sub_before = sub_dims // 2
     sub_after = sub_dims - sub_before
-
+
+    # Remove excess dimensions sequentially
     # Remove planes from the beginning and end
     if sub_dims[0] == 0:
         trimmed_planes = padded_array
     else:
         trimmed_planes = padded_array[sub_before[0]:-sub_after[0], :, :]
-
+
     # Remove rows from the beginning and end
     if sub_dims[1] == 0:
         trimmed_rows = trimmed_planes
     else:
         trimmed_rows = trimmed_planes[:, sub_before[1]:-sub_after[1], :]
-
+
     # Remove columns from the beginning and end
     if sub_dims[2] == 0:
         trimmed_array = trimmed_rows
     else:
         trimmed_array = trimmed_rows[:, :, sub_before[2]:-sub_after[2]]
-
+
     return trimmed_array

 def remove_branches(skeleton, length):
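A minimal usage sketch of the reworked upsample_with_padding signature (not part of the diff; it assumes the function is imported from nettracer3d.nettracer and that numpy/scipy are installed):

    import numpy as np
    from nettracer3d import nettracer

    small = np.random.randint(0, 3, size=(10, 50, 50))  # e.g. a downsampled label volume
    # factor=None: the zoom factor is inferred from original_shape / current shape
    restored = nettracer.upsample_with_padding(small, original_shape=(20, 100, 100))
    print(restored.shape)  # (20, 100, 100): zoomed, then padded/trimmed to match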
@@ -556,6 +613,104 @@ def threshold(arr, proportion, custom_rad = None):

     return arr

+def z_project(array3d, method='max'):
+    """
+    Project a 3D numpy array along the Z axis to create a 2D array.
+
+    Parameters:
+    array3d (numpy.ndarray): 3D input array with shape (Z, Y, X)
+    method (str): Projection method - 'max', 'mean', 'min', 'sum', or 'std'
+
+    Returns:
+    numpy.ndarray: 2D projected array with shape (Y, X)
+    """
+    if not isinstance(array3d, np.ndarray) or array3d.ndim != 3:
+        raise ValueError("Input must be a 3D numpy array")
+
+    if method == 'max':
+        return np.max(array3d, axis=0)
+    elif method == 'mean':
+        return np.mean(array3d, axis=0)
+    elif method == 'min':
+        return np.min(array3d, axis=0)
+    elif method == 'sum':
+        return np.sum(array3d, axis=0)
+    elif method == 'std':
+        return np.std(array3d, axis=0)
+    else:
+        raise ValueError("Method must be one of: 'max', 'mean', 'min', 'sum', 'std'")
+
+def fill_holes_3d(array):
+
+    def process_slice(slice_2d, border_threshold=0.08):
+        """
+        Process a 2D slice, considering components that touch less than border_threshold
+        of any border length as potential holes.
+
+        Args:
+            slice_2d: 2D binary array
+            border_threshold: proportion of border that must be touched to be considered background
+        """
+        slice_2d = slice_2d.astype(np.uint8)
+        labels, num_features = ndimage.label(slice_2d)
+
+        if num_features == 0:
+            return np.zeros_like(slice_2d)
+
+        # Get dimensions for threshold calculations
+        height, width = slice_2d.shape
+
+        # Dictionary to store border intersection lengths for each label
+        border_proportions = {}
+
+        for label in range(1, num_features + 1):
+            mask = labels == label
+
+            # Calculate proportion of each border this component touches
+            top_prop = np.sum(mask[0, :]) / width
+            bottom_prop = np.sum(mask[-1, :]) / width
+            left_prop = np.sum(mask[:, 0]) / height
+            right_prop = np.sum(mask[:, -1]) / height
+
+            # If it touches any border significantly, consider it background
+            border_proportions[label] = max(top_prop, bottom_prop, left_prop, right_prop)
+
+        # Create mask of components that either don't touch borders
+        # or touch less than the threshold proportion
+        background_labels = {label for label, prop in border_proportions.items()
+                             if prop > border_threshold}
+
+        holes_mask = ~np.isin(labels, list(background_labels))
+
+        return holes_mask
+
+    array = binarize(array)
+    inv_array = invert_array(array)
+
+    # Create arrays for all three planes
+    array_xy = np.zeros_like(inv_array, dtype=np.uint8)
+    array_xz = np.zeros_like(inv_array, dtype=np.uint8)
+    array_yz = np.zeros_like(inv_array, dtype=np.uint8)
+
+    # Process XY plane
+    for z in range(inv_array.shape[0]):
+        array_xy[z] = process_slice(inv_array[z])
+
+    # Process XZ plane
+    for y in range(inv_array.shape[1]):
+        slice_xz = inv_array[:, y, :]
+        array_xz[:, y, :] = process_slice(slice_xz)
+
+    # Process YZ plane
+    for x in range(inv_array.shape[2]):
+        slice_yz = inv_array[:, :, x]
+        array_yz[:, :, x] = process_slice(slice_yz)
+
+    # Combine results from all three planes
+    filled = (array_xy | array_xz | array_yz) * 255
+    return array + filled
+
+



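A short usage sketch for the new z_project helper (the import path is an assumption, not part of the diff):

    import numpy as np
    from nettracer3d import nettracer

    stack = np.random.rand(30, 256, 256)                 # (Z, Y, X)
    mip = nettracer.z_project(stack, method='max')       # maximum-intensity projection
    avg = nettracer.z_project(stack, method='mean')
    print(mip.shape, avg.shape)                          # (256, 256) (256, 256)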
@@ -813,6 +968,205 @@ def dilate_3D(tiff_array, dilated_x, dilated_y, dilated_z):

     return final_result

+def dilate_3D_recursive(tiff_array, dilated_x, dilated_y, dilated_z, step_size=None):
+    """Recursive 3D dilation method that handles odd-numbered dilations properly.
+
+    Args:
+        tiff_array: Input 3D array
+        dilated_x, dilated_y, dilated_z: Odd numbers representing total dilation size
+        step_size: Size of dilation step for this iteration
+
+    Each dilation parameter represents (n-1)/2 steps outward from the object.
+    """
+    # Calculate the smallest dimension of the array
+    min_dim = min(tiff_array.shape)
+
+    # For small dilations relative to array size, don't use recursion
+    max_dilation = max(dilated_x, dilated_y, dilated_z)
+    if max_dilation < (0.2 * min_dim):
+        return dilate_3D_recursive(tiff_array, dilated_x, dilated_y, dilated_z, step_size=1)
+
+    # Initialize step_size for first call
+    if step_size is None:
+        # Start with a reasonable step size based on the largest dilation
+        step_size = min(5, max((max_dilation - 1) // 2 // 3, 1))
+
+    # Base case: if step_size is 1 or we've achieved full dilation
+    if step_size == 1 or (dilated_x <= 1 and dilated_y <= 1 and dilated_z <= 1):
+        def create_circular_kernel(diameter):
+            radius = diameter/2
+            size = radius
+            size = int(np.ceil(size))
+            y, x = np.ogrid[-radius:radius+1, -radius:radius+1]
+            distance = np.sqrt(x**2 + y**2)
+            kernel = distance <= radius
+            return kernel.astype(np.uint8)
+
+        def create_ellipsoidal_kernel(long_axis, short_axis):
+            semi_major, semi_minor = long_axis / 2, short_axis / 2
+            size_y = int(np.ceil(semi_minor))
+            size_x = int(np.ceil(semi_major))
+            y, x = np.ogrid[-semi_minor:semi_minor+1, -semi_major:semi_major+1]
+            ellipse = (x**2 / semi_major**2) + (y**2 / semi_minor**2) <= 1
+            return ellipse.astype(np.uint8)
+
+        def process_slice(z):
+            tiff_slice = tiff_array[z].astype(np.uint8)
+            dilated_slice = cv2.dilate(tiff_slice, kernel, iterations=1)
+            return z, dilated_slice
+
+        def process_slice_other(y):
+            tiff_slice = tiff_array[:, y, :].astype(np.uint8)
+            dilated_slice = cv2.dilate(tiff_slice, kernel, iterations=1)
+            return y, dilated_slice
+
+        # Create empty arrays for the dilated results
+        dilated_xy = np.zeros_like(tiff_array, dtype=np.uint8)
+        dilated_xz = np.zeros_like(tiff_array, dtype=np.uint8)
+
+        # Create kernels for final dilation
+        kernel = create_circular_kernel(dilated_x)
+
+        # Process XY plane
+        num_cores = mp.cpu_count()
+        with ThreadPoolExecutor(max_workers=num_cores) as executor:
+            futures = {executor.submit(process_slice, z): z for z in range(tiff_array.shape[0])}
+            for future in as_completed(futures):
+                z, dilated_slice = future.result()
+                dilated_xy[z] = dilated_slice
+
+        # Process XZ plane
+        kernel = create_ellipsoidal_kernel(dilated_x, dilated_z)
+        with ThreadPoolExecutor(max_workers=num_cores) as executor:
+            futures = {executor.submit(process_slice_other, y): y for y in range(tiff_array.shape[1])}
+            for future in as_completed(futures):
+                y, dilated_slice = future.result()
+                dilated_xz[:, y, :] = dilated_slice
+
+        return dilated_xy | dilated_xz
+
+    # Calculate current iteration's dilation sizes (must be odd numbers)
+    current_x_steps = min((dilated_x - 1) // 2, step_size)
+    current_y_steps = min((dilated_y - 1) // 2, step_size)
+    current_z_steps = min((dilated_z - 1) // 2, step_size)
+
+    current_x_dilation = current_x_steps * 2 + 1
+    current_y_dilation = current_y_steps * 2 + 1
+    current_z_dilation = current_z_steps * 2 + 1
+
+    # Perform current iteration's dilation
+    current_result = dilate_3D_recursive(tiff_array, current_x_dilation, current_y_dilation, current_z_dilation, step_size=1)
+
+    # Calculate remaining dilation needed
+    # For X and Y, use the circle radius (current_x_steps)
+    # For Z, use the ellipse short axis (current_z_steps)
+    remaining_x = max(1, dilated_x - (current_x_steps * 2))
+    remaining_y = max(1, dilated_y - (current_y_steps * 2))
+    remaining_z = max(1, dilated_z - (current_z_steps * 2))
+
+    # If no more dilation needed, return current result
+    if remaining_x == 1 and remaining_y == 1 and remaining_z == 1:
+        return current_result
+
+    # Recursive call with remaining dilation and decreased step size
+    return dilate_3D_recursive(current_result, remaining_x, remaining_y, remaining_z, step_size=max(1, step_size - 1))
+
+def erode_3D(tiff_array, eroded_x, eroded_y, eroded_z):
+    """Internal method to erode an array in 3D. Erosion this way is much faster than using a distance transform although the latter is theoretically more accurate.
+    Arguments are an array, and the desired pixel erosion amounts in X, Y, Z."""
+    def create_circular_kernel(diameter):
+        """Create a 2D circular kernel with a given radius.
+        Parameters:
+        radius (int or float): The radius of the circle.
+        Returns:
+        numpy.ndarray: A 2D numpy array representing the circular kernel.
+        """
+        # Determine the size of the kernel
+        radius = diameter/2
+        size = radius  # Diameter of the circle
+        size = int(np.ceil(size))  # Ensure size is an integer
+
+        # Create a grid of (x, y) coordinates
+        y, x = np.ogrid[-radius:radius+1, -radius:radius+1]
+
+        # Calculate the distance from the center (0,0)
+        distance = np.sqrt(x**2 + y**2)
+
+        # Create the circular kernel: points within the radius are 1, others are 0
+        kernel = distance <= radius
+
+        # Convert the boolean array to integer (0 and 1)
+        return kernel.astype(np.uint8)
+
+    def create_ellipsoidal_kernel(long_axis, short_axis):
+        """Create a 2D ellipsoidal kernel with specified axis lengths and orientation.
+        Parameters:
+        long_axis (int or float): The length of the long axis.
+        short_axis (int or float): The length of the short axis.
+        Returns:
+        numpy.ndarray: A 2D numpy array representing the ellipsoidal kernel.
+        """
+        semi_major, semi_minor = long_axis / 2, short_axis / 2
+        # Determine the size of the kernel
+        size_y = int(np.ceil(semi_minor))
+        size_x = int(np.ceil(semi_major))
+
+        # Create a grid of (x, y) coordinates centered at (0,0)
+        y, x = np.ogrid[-semi_minor:semi_minor+1, -semi_major:semi_major+1]
+
+        # Ellipsoid equation: (x/a)^2 + (y/b)^2 <= 1
+        ellipse = (x**2 / semi_major**2) + (y**2 / semi_minor**2) <= 1
+
+        return ellipse.astype(np.uint8)
+
+    z_depth = tiff_array.shape[0]
+
+    # Function to process each slice
+    def process_slice(z):
+        tiff_slice = tiff_array[z].astype(np.uint8)
+        eroded_slice = cv2.erode(tiff_slice, kernel, iterations=1)
+        return z, eroded_slice
+
+    def process_slice_other(y):
+        tiff_slice = tiff_array[:, y, :].astype(np.uint8)
+        eroded_slice = cv2.erode(tiff_slice, kernel, iterations=1)
+        return y, eroded_slice
+
+    # Create empty arrays to store the eroded results for the XY and XZ planes
+    eroded_xy = np.zeros_like(tiff_array, dtype=np.uint8)
+    eroded_xz = np.zeros_like(tiff_array, dtype=np.uint8)
+
+    kernel_x = int(eroded_x)
+    kernel = create_circular_kernel(kernel_x)
+
+    num_cores = mp.cpu_count()
+    with ThreadPoolExecutor(max_workers=num_cores) as executor:
+        futures = {executor.submit(process_slice, z): z for z in range(tiff_array.shape[0])}
+        for future in as_completed(futures):
+            z, eroded_slice = future.result()
+            eroded_xy[z] = eroded_slice
+
+    kernel_x = int(eroded_x)
+    kernel_z = int(eroded_z)
+    kernel = create_ellipsoidal_kernel(kernel_x, kernel_z)
+
+    if z_depth != 2:
+
+        with ThreadPoolExecutor(max_workers=num_cores) as executor:
+            futures = {executor.submit(process_slice_other, y): y for y in range(tiff_array.shape[1])}
+
+            for future in as_completed(futures):
+                y, eroded_slice = future.result()
+                eroded_xz[:, y, :] = eroded_slice
+
+    # Overlay the results using AND operation instead of OR for erosion
+    if z_depth != 2:
+        final_result = eroded_xy & eroded_xz
+    else:
+        return eroded_xy
+
+    return final_result
+

 def dilate_3D_old(tiff_array, dilated_x, dilated_y, dilated_z):
     """(For cubey dilation only). Internal method to dilate an array in 3D.
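A hedged usage sketch for the new low-level dilation and erosion routines (the import path is an assumption; per the docstring above, the dilation sizes are odd pixel diameters):

    import numpy as np
    from nettracer3d import nettracer

    mask = np.zeros((32, 128, 128), dtype=np.uint8)
    mask[16, 64, 64] = 1                                    # a single seed voxel
    grown = nettracer.dilate_3D_recursive(mask, 7, 7, 3)    # 7 px across X/Y, 3 px across Z
    shrunk = nettracer.erode_3D(grown, 3, 3, 3)             # partial undo via the new erosion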
@@ -947,6 +1301,10 @@ def combine_nodes(root_nodes, other_nodes, other_ID, identity_dict, root_ID = No
         identity_dict[item] = root_ID

     for item in otherIDs: #Always adds the other vals to the dictionary
+        try:
+            other_ID = os.path.basename(other_ID)
+        except:
+            pass
         identity_dict[item] = other_ID

     nodes = root_nodes + other_nodes #Combine the outer edges with the inner edges modified via the above steps
@@ -1025,7 +1383,7 @@ def binarize(arrayimage, directory = None):

     return arrayimage

-def dilate(arrayimage, amount, xy_scale = 1, z_scale = 1, directory = None, fast_dil = False):
+def dilate(arrayimage, amount, xy_scale = 1, z_scale = 1, directory = None, fast_dil = False, recursive = False):
     """
     Can be used to dilate a binary image in 3D. Dilated output will be saved to the active directory if none is specified. Note that dilation is done with single-instance kernels and not iterations, and therefore
     objects will lose their shape somewhat and become cube-ish if the 'amount' param is ever significantly larger than the objects in quesiton.
@@ -1050,13 +1408,15 @@ def dilate(arrayimage, amount, xy_scale = 1, z_scale = 1, directory = None, fast
     if len(np.unique(arrayimage)) > 2: #binarize
         arrayimage = binarize(arrayimage)

-    if not fast_dil:
+    if not fast_dil and not recursive:
         arrayimage = (dilate_3D(arrayimage, dilate_xy, dilate_xy, dilate_z)) * 255
         if np.max(arrayimage) == 1:
             arrayimage = arrayimage * 255
-
-    else:
+    elif not recursive:
         arrayimage = (dilate_3D_old(arrayimage, dilate_xy, dilate_xy, dilate_z)) * 255
+    else:
+        arrayimage = (dilate_3D_recursive(arrayimage, dilate_xy, dilate_xy, dilate_z)) * 255
+


     if type(image) == str:
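The public dilate()/erode() wrappers take a physical distance plus voxel scaling rather than raw kernel sizes; a usage sketch under that assumption (not from the diff):

    import numpy as np
    from nettracer3d import nettracer

    mask = np.zeros((32, 128, 128), dtype=np.uint8)
    mask[16, 64, 64] = 255
    grown = nettracer.dilate(mask, 10, xy_scale=0.5, z_scale=2.0, recursive=True)
    shrunk = nettracer.erode(grown, 10, xy_scale=0.5, z_scale=2.0)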
@@ -1070,6 +1430,23 @@ def dilate(arrayimage, amount, xy_scale = 1, z_scale = 1, directory = None, fast

     return arrayimage

+def erode(arrayimage, amount, xy_scale = 1, z_scale = 1):
+    if len(np.unique(arrayimage)) > 2: #binarize
+        arrayimage = binarize(arrayimage)
+    erode_xy, erode_z = dilation_length_to_pixels(xy_scale, z_scale, amount, amount)
+
+    if len(np.unique(arrayimage)) > 2: #binarize
+        arrayimage = binarize(arrayimage)
+
+    arrayimage = (erode_3D(arrayimage, erode_xy, erode_xy, erode_z)) * 255
+    if np.max(arrayimage) == 1:
+        arrayimage = arrayimage * 255
+
+    return arrayimage
+
+
+
+

 def skeletonize(arrayimage, directory = None):
     """
@@ -1560,6 +1937,9 @@ def encapsulate(parent_dir = None, name = None):

     return new_folder_path

+
+
+
 #THE 3D NETWORK CLASS

 class Network_3D:
@@ -1617,7 +1997,8 @@ class Network_3D:
         if array is not None and not isinstance(array, np.ndarray):
             raise ValueError("nodes must be a (preferably labelled) numpy array.")
         if array is not None and len(array.shape) == 2: #For dealing with 2D images
-            array = np.stack((array, array), axis = 0)
+            #array = np.stack((array, array), axis = 0)
+            array = np.expand_dims(array, axis=0)
         self._nodes = array

     @nodes.deleter
@@ -1735,7 +2116,8 @@ class Network_3D:
         if array is not None and not isinstance(array, np.ndarray):
             raise ValueError("edges must be a (preferably labelled) numpy array.")
         if array is not None and len(array.shape) == 2: #For dealing with 2D images
-            array = np.stack((array, array), axis = 0)
+            #array = np.stack((array, array), axis = 0)
+            array = np.expand_dims(array, axis=0)
         self._edges = array

     @edges.deleter
@@ -1756,6 +2138,9 @@ class Network_3D:
         """Sets the search_region property"""
         if array is not None and not isinstance(array, np.ndarray):
             raise ValueError("search_region must be a (preferably labelled) numpy array.")
+        if array is not None and len(array.shape) == 2: #For dealing with 2D images
+            #array = np.stack((array, array), axis = 0)
+            array = np.expand_dims(array, axis=0)
         self._search_region = array

     @search_region.deleter
@@ -1836,6 +2221,9 @@ class Network_3D:
         """Sets the nodes property"""
         if array is not None and not isinstance(array, np.ndarray):
             raise ValueError("network overlay must be a (preferably labelled) numpy array.")
+        if array is not None and len(array.shape) == 2: #For dealing with 2D images
+            #array = np.stack((array, array), axis = 0)
+            array = np.expand_dims(array, axis=0)

         self._network_overlay = array

@@ -1852,6 +2240,9 @@ class Network_3D:
         """Sets the nodes property"""
         if array is not None and not isinstance(array, np.ndarray):
             raise ValueError("id overlay must be a (preferably labelled) numpy array.")
+        if array is not None and len(array.shape) == 2: #For dealing with 2D images
+            #array = np.stack((array, array), axis = 0)
+            array = np.expand_dims(array, axis=0)

         self._id_overlay = array

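The same 2D handling change (np.expand_dims in place of np.stack) appears in the nodes, edges, search_region, network_overlay, and id_overlay setters above. The practical difference, illustrated outside the diff:

    import numpy as np

    img2d = np.ones((256, 256), dtype=np.uint8)
    print(np.stack((img2d, img2d), axis=0).shape)   # (2, 256, 256): old behavior duplicated the plane
    print(np.expand_dims(img2d, axis=0).shape)      # (1, 256, 256): new behavior keeps a single plane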
@@ -2868,12 +3259,14 @@ class Network_3D:

     #Some methods that may be useful:

-    def community_partition(self, weighted = False, style = 0):
+    def community_partition(self, weighted = False, style = 0, dostats = True):
         """
         Sets the communities attribute by splitting the network into communities
         """

-        self._communities, self.normalized_weights = modularity.community_partition(self._network_lists, weighted = weighted, style = style)
+        self._communities, self.normalized_weights, stats = modularity.community_partition(self._network_lists, weighted = weighted, style = style, dostats = dostats)
+
+        return stats

     def remove_edge_weights(self):
         """
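A sketch of the updated community_partition call, written as a helper so no assumptions are made about how the Network_3D object is constructed (the dostats flag and the extra return value come from this hunk):

    from nettracer3d.nettracer import Network_3D

    def partition_with_stats(net: Network_3D):
        # With dostats=True the method also returns the statistics produced by
        # modularity.community_partition; the communities themselves are still
        # stored on the object.
        stats = net.community_partition(weighted=False, style=0, dostats=True)
        return net.communities, stats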
@@ -2966,6 +3359,19 @@ class Network_3D:
         elif num_edge < 65536:
             self._edges = self._edges.astype(np.uint16)

+        node_bools = self._nodes == 0
+
+        self._nodes = self._nodes.astype(np.uint32)
+        self._edges = self._edges * node_bools
+        self._nodes = self._nodes + self._edges
+        num_node = np.max(self._nodes)
+
+        if num_node < 256:
+            self._nodes = self._nodes.astype(np.uint8)
+        elif num_node < 65536:
+            self._nodes = self._nodes.astype(np.uint16)
+
+
     def trunk_to_node(self):
         """
         Converts the edge 'trunk' into a node. In this case, the trunk is the edge that creates the most node-node connections. There may be times when many nodes are connected by a single, expansive edge that obfuscates the rest of the edges. Converting the trunk to a node can better reveal these edges.
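The merge added above only writes edge labels into voxels that are not already occupied by a node before re-fitting the dtype; a small numpy illustration (not from the diff):

    import numpy as np

    nodes = np.array([[1, 0], [0, 2]], dtype=np.uint32)
    edges = np.array([[9, 9], [0, 0]], dtype=np.uint32)
    merged = nodes + edges * (nodes == 0)   # edge label 9 fills only the empty voxel
    print(merged)                           # [[1 9]
                                            #  [0 2]]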
@@ -2989,16 +3395,14 @@ class Network_3D:
                 nodeb.append(addtrunk)
                 nodea.append(nodesb[i])
                 nodeb.append(addtrunk)
-                edgec.append(
-                edgec.append(
+                edgec.append(0)
+                edgec.append(0)
             else:
                 nodea.append(nodesa[i])
                 nodeb.append(nodesb[i])
                 edgec.append(edgesc[i])

-        self.
-
-        self.network, _ = network_analysis.weighted_network(self._network_lists)
+        self.network_lists = [nodea, nodeb, edgec]

         self._node_centroids[addtrunk] = self._edge_centroids[trunk]

@@ -3013,6 +3417,19 @@ class Network_3D:
         else:
             self._node_identities[addtrunk] = "Trunk"

+        if self._edges is not None and self._nodes is not None:
+
+            node_bools = self._nodes == 0
+
+            trunk = self._edges == trunk
+
+            trunk = trunk * addtrunk
+
+            trunk = trunk * node_bools
+
+            self._nodes = self._nodes + trunk
+
+



@@ -3480,28 +3897,55 @@ class Network_3D:
         mothers, overlay = community_extractor.extract_mothers(self._nodes, self._network, centroid_dic = self._node_centroids, directory = directory, louvain = louvain, called = called)
         return mothers, overlay

-    def extract_communities(self, directory = None, down_factor = 1, color_code = True):
-        """
-        Method to generate overlays that relate community detection in a network to the 3D structure.
-        Overlays include a grayscale image where nodes are assigned a grayscale value corresponding to their community, a numerical index where numbers are drawn at nodes corresponding to their community, and a
-        color coded overlay where a nodes color corresponds to its community. Community detection will be done with label propogation.
-        These will be saved to the active directory if none is specified.
-        :param directory: (Optional - Val = None; string). A path to a directory to save outputs.
-        :param down_factor: (Optional - Val = 1; int). A factor to downsample nodes by while drawing overlays. Note this option REQUIRES node_centroids to already be set.
-        :param color code: (Optional - Val = True; boolean). If set to False, the color-coded overlay will not be drawn.
-        :returns: A dictionary where nodes are grouped by community.
-        """
-        if down_factor > 1:
-            centroids = self._node_centroids.copy()
-            for item in self._node_centroids:
-                centroids[item] = np.round((self._node_centroids[item]) / down_factor)
-            nodes = downsample(self._nodes, down_factor)
-            partition = network_analysis.community_partition_simple(nodes, self._network, directory = directory, centroids = centroids, color_code = color_code)

+    def isolate_hubs(self, proportion = 0.1, retimg = True):
+
+        hubs = community_extractor.find_hub_nodes(self._network, proportion)
+
+        if retimg:
+
+            hub_img = np.isin(self._nodes, hubs) * self._nodes
         else:
-
+            hub_iimg = None
+
+        return hubs, hub_img
+
+
+    def extract_communities(self, color_code = True, down_factor = None, identities = False):
+
+        if down_factor is not None:
+            original_shape = self._nodes.shape
+            temp = downsample(self._nodes, down_factor)
+            if color_code:
+                if not identities:
+                    image, output = community_extractor.assign_community_colors(self.communities, temp)
+                else:
+                    image, output = community_extractor.assign_community_colors(self.node_identities, temp)
+            else:
+                if not identities:
+                    image, output = community_extractor.assign_community_grays(self.communities, temp)
+                else:
+                    image, output = community_extractor.assign_community_grays(self.node_identities, temp)
+            image = upsample_with_padding(image, down_factor, original_shape)
+        else:
+
+            if color_code:
+                if not identities:
+                    image, output = community_extractor.assign_community_colors(self.communities, self._nodes)
+                else:
+                    image, output = community_extractor.assign_community_colors(self.node_identities, self._nodes)
+            else:
+                if not identities:
+                    image, output = community_extractor.assign_community_grays(self.communities, self._nodes)
+                else:
+                    image, output = community_extractor.assign_community_grays(self.node_identities, self._nodes)
+
+
+        return image, output
+
+
+

-            return partition

     def extract_communities_louvain(self, directory = None, down_factor = 1, color_code = True):
         """
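A hedged sketch of the two reworked methods, again written against an existing Network_3D instance (the old extract_communities saved overlays to a directory; the new one returns the overlay image and the grouping directly, and assumes communities or node identities are already set):

    from nettracer3d.nettracer import Network_3D

    def hubs_and_communities(net: Network_3D):
        # Hub nodes chosen by community_extractor.find_hub_nodes, plus (with
        # retimg=True) a labeled image containing only those nodes
        hubs, hub_img = net.isolate_hubs(proportion=0.1, retimg=True)
        # Color-coded community overlay and the node-to-community grouping
        image, groups = net.extract_communities(color_code=True, down_factor=None)
        return hubs, hub_img, image, groups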
@@ -3561,9 +4005,9 @@ class Network_3D:
         :returns: an equivalent random networkx graph object
         """

-        G = network_analysis.generate_random(self._network, self._network_lists, weighted = weighted)
+        G, df = network_analysis.generate_random(self._network, self._network_lists, weighted = weighted)

-        return G
+        return G, df

     def degree_distribution(self, directory = None):
         """
@@ -3645,12 +4089,79 @@ class Network_3D:
         except:
             stats['degree_assortativity'] = "Failed to compute"

+        try:
+            nodes = np.unique(self._nodes)
+            if nodes[0] == 0:
+                nodes = np.delete(nodes, 0)
+            stats['Unconnected nodes (left out from node image)'] = (len(nodes) - len(G.nodes()))
+        except:
+            stats['Unconnected nodes (left out from node image)'] = "Failed to compute"
+

         return stats


+    def neighborhood_identities(self, root, directory = None, mode = 0, search = 0):
+
+
+
+        targets = []
+        total_dict = {}
+        neighborhood_dict = {}
+        proportion_dict = {}
+        G = self._network
+        node_identities = self._node_identities
+        for val in set(node_identities.values()):
+            total_dict[val] = 0
+            neighborhood_dict[val] = 0
+
+        for node in node_identities:
+            nodeid = node_identities[node]
+            total_dict[nodeid] += 1
+            if nodeid == root:
+                targets.append(node)
+
+
+        if mode == 0: #search neighbor ids within the network
+
+
+            for node in G.nodes():
+                nodeid = node_identities[node]
+                neighbors = list(G.neighbors(node))
+                for subnode in neighbors:
+                    subnodeid = node_identities[subnode]
+                    if subnodeid == root:
+                        neighborhood_dict[nodeid] += 1
+                        break
+
+            title1 = f'Neighborhood Distribution of Nodes in Network from Nodes: {root}'
+            title2 = f'Neighborhood Distribution of Nodes in Network from Nodes {root} as a proportion of total nodes of that ID'
+
+
+        elif mode == 1: #Search neighborhoods morphologically, obtain densities
+            neighborhood_dict, total_dict, densities = morphology.search_neighbor_ids(self._nodes, targets, node_identities, neighborhood_dict, total_dict, search, self._xy_scale, self._z_scale, root)
+            title1 = f'Volumetric Neighborhood Distribution of Nodes in image that are {search} from nodes: {root}'
+            title2 = f'Density Distribution of Nodes in image that are {search} from Nodes {root} as a proportion of total node volume of that ID'
+
+
+        for identity in neighborhood_dict:
+            proportion_dict[identity] = neighborhood_dict[identity]/total_dict[identity]

-
+        network_analysis.create_bar_graph(neighborhood_dict, title1, "Node Identity", "Amount", directory=directory)
+
+        network_analysis.create_bar_graph(proportion_dict, title2, "Node Identity", "Proportion", directory=directory)
+
+        try:
+            network_analysis.create_bar_graph(densities, f'Clustering Factor of Node Identities with {search} from nodes {root}', "Node Identity", "Density Search/Density Total", directory=directory)
+        except:
+            densities = None
+
+
+        return neighborhood_dict, proportion_dict, title1, title2, densities
+
+
+
+    #Morphological stats or network linking:

     def volumes(self, sort = 'nodes'):

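A usage sketch for the new neighborhood_identities method (the identity label "Artery" is hypothetical; it must match a value present in node_identities):

    from nettracer3d.nettracer import Network_3D

    def neighborhoods_around(net: Network_3D):
        # mode=0 counts, per identity, how many nodes have at least one network
        # neighbor of the root identity; mode=1 searches morphologically within
        # `search` (scaled units) and additionally returns density ratios.
        counts, proportions, title1, title2, densities = net.neighborhood_identities(
            "Artery", mode=0)
        return counts, proportions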
@@ -3664,6 +4175,16 @@ class Network_3D:

         return morphology.calculate_voxel_volumes(self._edges, self._xy_scale, self._z_scale)

+        elif sort == 'network_overlay':
+
+            return morphology.calculate_voxel_volumes(self._network_overlay, self._xy_scale, self._z_scale)
+
+        elif sort == 'id_overlay':
+
+            return morphology.calculate_voxel_volumes(self._id_overlay, self._xy_scale, self._z_scale)
+
+
+

     def interactions(self, search = 0, cores = 0, resize = None, save = False, skele = False):

@@ -3671,13 +4192,13 @@ class Network_3D:



-    def morph_proximity(self, search = 0):
+    def morph_proximity(self, search = 0, targets = None):

         search_x, search_z = dilation_length_to_pixels(self._xy_scale, self._z_scale, search, search)

         num_nodes = np.max(self._nodes)

-        my_dict = proximity.create_node_dictionary(self._nodes, num_nodes, search_x, search_z)
+        my_dict = proximity.create_node_dictionary(self._nodes, num_nodes, search_x, search_z, targets = targets)

         my_dict = proximity.find_shared_value_pairs(my_dict)

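Finally, a sketch of the extended morph_proximity signature; the targets list (new in 0.2.8) restricts the proximity search to the given node labels, and the label values shown here are hypothetical:

    from nettracer3d.nettracer import Network_3D

    def proximity_around_targets(net: Network_3D):
        # search is a physical distance converted via dilation_length_to_pixels;
        # targets=None keeps the old behavior of searching around every node.
        return net.morph_proximity(search=5, targets=[1, 2, 3])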