nettracer3d 0.6.8__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nettracer3d/modularity.py +23 -24
- nettracer3d/morphology.py +14 -9
- nettracer3d/nettracer.py +271 -44
- nettracer3d/nettracer_gui.py +502 -64
- nettracer3d/proximity.py +376 -16
- nettracer3d/segmenter.py +87 -4
- nettracer3d/smart_dilate.py +23 -24
- {nettracer3d-0.6.8.dist-info → nettracer3d-0.7.0.dist-info}/METADATA +42 -9
- nettracer3d-0.7.0.dist-info/RECORD +20 -0
- {nettracer3d-0.6.8.dist-info → nettracer3d-0.7.0.dist-info}/WHEEL +1 -1
- nettracer3d-0.6.8.dist-info/RECORD +0 -20
- {nettracer3d-0.6.8.dist-info → nettracer3d-0.7.0.dist-info}/entry_points.txt +0 -0
- {nettracer3d-0.6.8.dist-info → nettracer3d-0.7.0.dist-info}/licenses/LICENSE +0 -0
- {nettracer3d-0.6.8.dist-info → nettracer3d-0.7.0.dist-info}/top_level.txt +0 -0
nettracer3d/modularity.py
CHANGED
@@ -446,31 +446,11 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
         except:
             pass

-        try:
-            # Per-community statistics
-            for i, com in enumerate(communities):
-                subgraph = G.subgraph(com)
-
-                # Basic community metrics
-                stats[f'Community {i+1} Density'] = nx.density(subgraph)
-                stats[f'Community {i+1} Conductance'] = nx.conductance(G, com)
-                stats[f'Community {i+1} Avg Clustering'] = nx.average_clustering(subgraph)
-
-                # Degree centrality
-                degree_cent = nx.degree_centrality(subgraph)
-                stats[f'Community {i+1} Avg Degree Centrality'] = np.mean(list(degree_cent.values()))
-
-                # Average path length (only for connected subgraphs)
-                if nx.is_connected(subgraph):
-                    stats[f'Community {i+1} Avg Path Length'] = nx.average_shortest_path_length(subgraph)
-        except:
-            pass
-
-        try:
+        #try:
         # Add some Louvain-specific statistics
-            stats['Partition Resolution'] = 1.0 # Default resolution parameter
-        except:
-            pass
+        #stats['Partition Resolution'] = 1.0 # Default resolution parameter
+        #except:
+        #pass
         try:
             stats['Number of Iterations'] = len(set(partition.values()))
         except:
@@ -514,6 +494,25 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
         except:
             pass

+        try:
+            # Per-community statistics
+            for i, com in enumerate(communities):
+                subgraph = G.subgraph(com)
+
+                # Basic community metrics
+                stats[f'Community {i+1} Density'] = nx.density(subgraph)
+                stats[f'Community {i+1} Conductance'] = nx.conductance(G, com)
+                stats[f'Community {i+1} Avg Clustering'] = nx.average_clustering(subgraph)
+
+                # Degree centrality
+                degree_cent = nx.degree_centrality(subgraph)
+                stats[f'Community {i+1} Avg Degree Centrality'] = np.mean(list(degree_cent.values()))
+
+                # Average path length (only for connected subgraphs)
+                if nx.is_connected(subgraph):
+                    stats[f'Community {i+1} Avg Path Length'] = nx.average_shortest_path_length(subgraph)
+        except:
+            pass

         return stats

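Note on the modularity.py change: 0.7.0 moves the per-community statistics block later in community_partition and comments out the hard-coded 'Partition Resolution' entry. The block itself is unchanged; below is a minimal standalone sketch of what it computes, assuming a networkx graph G and a list of community node sets (the function name here is illustrative, not the package's API):

import networkx as nx
import numpy as np

def per_community_stats(G, communities):
    # Mirrors the relocated block; nettracer3d wraps it in try/except because
    # conductance and path length can fail on degenerate communities.
    stats = {}
    for i, com in enumerate(communities):
        subgraph = G.subgraph(com)
        stats[f'Community {i+1} Density'] = nx.density(subgraph)
        stats[f'Community {i+1} Conductance'] = nx.conductance(G, com)
        stats[f'Community {i+1} Avg Clustering'] = nx.average_clustering(subgraph)
        degree_cent = nx.degree_centrality(subgraph)
        stats[f'Community {i+1} Avg Degree Centrality'] = np.mean(list(degree_cent.values()))
        if nx.is_connected(subgraph):  # path length is undefined otherwise
            stats[f'Community {i+1} Avg Path Length'] = nx.average_shortest_path_length(subgraph)
    return stats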
nettracer3d/morphology.py
CHANGED
@@ -354,6 +354,7 @@ def process_object_cpu(label, objects, labeled_array, xy_scale = 1, z_scale = 1)
     mask = (subarray == label)


+    """
     # Determine which dimension needs resampling
     if (z_scale > xy_scale) and mask.shape[0] != 1:
         # Z dimension needs to be stretched
@@ -371,13 +372,13 @@ def process_object_cpu(label, objects, labeled_array, xy_scale = 1, z_scale = 1)
     # Resample the mask if needed
     if zoom_factor:
         mask = ndimage.zoom(mask, zoom_factor, order=0) # Use order=0 for binary masks
-
+    """

     # Compute distance transform on the smaller mask
-    dist_transform = compute_distance_transform_distance(mask)
+    dist_transform = compute_distance_transform_distance(mask, sampling = [z_scale, xy_scale, xy_scale])

     # Filter out small values near the edge to focus on more central regions
-    radius = np.max(dist_transform)
+    radius = np.max(dist_transform)

     return label, radius

@@ -474,6 +475,7 @@ def estimate_object_radii_gpu(labeled_array, xy_scale = 1, z_scale = 1):
         # Create binary mask for this object (directly on GPU)
         mask_gpu = (labeled_array_gpu[tuple(padded_slices)] == label)

+        """
         # Determine which dimension needs resampling
         if (z_scale > xy_scale) and mask_gpu.shape[0] != 1:
             # Z dimension needs to be stretched
@@ -491,11 +493,12 @@ def estimate_object_radii_gpu(labeled_array, xy_scale = 1, z_scale = 1):
         # Resample the mask if needed
         if zoom_factor:
             mask_gpu = cpx.zoom(mask_gpu, zoom_factor, order=0) # Use order=0 for binary masks
+        """

         # Compute distance transform on GPU
-        dist_transform_gpu = compute_distance_transform_distance_GPU(mask_gpu)
+        dist_transform_gpu = compute_distance_transform_distance_GPU(mask_gpu, sampling = [z_scale, xy_scale, xy_scale])

-        radius = float(cp.max(dist_transform_gpu).get())
+        radius = float(cp.max(dist_transform_gpu).get())


         # Store the radius and the scaled radius
@@ -510,14 +513,15 @@ def estimate_object_radii_gpu(labeled_array, xy_scale = 1, z_scale = 1):
        print(f"GPU calculation failed, trying CPU instead -> {e}")
        return estimate_object_radii_cpu(labeled_array)

-def compute_distance_transform_distance_GPU(nodes):
+def compute_distance_transform_distance_GPU(nodes, sampling = [1,1,1]):

    is_pseudo_3d = nodes.shape[0] == 1
    if is_pseudo_3d:
        nodes = cp.squeeze(nodes) # Convert to 2D for processing
+        del sampling[0]

    # Compute the distance transform on the GPU
-    distance = cpx.distance_transform_edt(nodes)
+    distance = cpx.distance_transform_edt(nodes, sampling = sampling)

    if is_pseudo_3d:
        cp.expand_dims(distance, axis = 0)
@@ -525,14 +529,15 @@ def compute_distance_transform_distance_GPU(nodes):
    return distance


-def compute_distance_transform_distance(nodes):
+def compute_distance_transform_distance(nodes, sampling = [1,1,1]):

    is_pseudo_3d = nodes.shape[0] == 1
    if is_pseudo_3d:
        nodes = np.squeeze(nodes) # Convert to 2D for processing
+        del sampling[0]

    # Fallback to CPU if there's an issue with GPU computation
-    distance = ndimage.distance_transform_edt(nodes)
+    distance = ndimage.distance_transform_edt(nodes, sampling = sampling)
    if is_pseudo_3d:
        np.expand_dims(distance, axis = 0)
    return distance
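Note on the morphology.py change: instead of resampling anisotropic binary masks with ndimage.zoom before the Euclidean distance transform, 0.7.0 disables that block (the triple-quoted string) and passes the voxel spacing directly via the transform's sampling argument, on both the SciPy and CuPy paths; the pseudo-3D branch drops the z entry after squeezing. A minimal sketch of the idea (not the package's exact code):

import numpy as np
from scipy import ndimage

# A toy anisotropic stack: z voxels are 3x coarser than xy voxels.
z_scale, xy_scale = 3.0, 1.0
mask = np.zeros((5, 64, 64), dtype=bool)
mask[2, 20:44, 20:44] = True

# sampling makes the EDT return distances in physical units directly,
# so no zoom to cubic voxels (and no zoom back) is needed.
dist = ndimage.distance_transform_edt(mask, sampling=[z_scale, xy_scale, xy_scale])
radius = dist.max()  # estimated inscribed radius, in physical units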
nettracer3d/nettracer.py
CHANGED
@@ -987,6 +987,7 @@ def dilate_3D_dt(array, search_distance, xy_scaling=1.0, z_scaling=1.0):

    del array

+    """
    # Determine which dimension needs resampling
    if (z_scaling > xy_scaling):
        # Z dimension needs to be stretched
@@ -1007,17 +1008,18 @@ def dilate_3D_dt(array, search_distance, xy_scaling=1.0, z_scaling=1.0):
    # Resample the mask if needed
    if zoom_factor:
        inv = ndimage.zoom(inv, zoom_factor, order=0) # Use order=0 for binary masks
-
+    """
+
    # Compute distance transform (Euclidean)
-    inv = smart_dilate.compute_distance_transform_distance(inv)
+    inv = smart_dilate.compute_distance_transform_distance(inv, sampling = [z_scaling, xy_scaling, xy_scaling])

-    inv = inv * cardinal
+    #inv = inv * cardinal

    # Threshold the distance transform to get dilated result
    inv = inv <= search_distance

-    if rev_factor:
-        inv = ndimage.zoom(inv, rev_factor, order=0) # Use order=0 for binary masks
+    #if rev_factor:
+        #inv = ndimage.zoom(inv, rev_factor, order=0) # Use order=0 for binary masks

    return inv.astype(np.uint8)

@@ -1043,6 +1045,7 @@ def erode_3D_dt(array, search_distance, xy_scaling=1.0, z_scaling=1.0):

    # For erosion, we work directly with the foreground (no inversion needed)

+    """
    # Determine which dimension needs resampling
    if (z_scaling > xy_scaling):
        # Z dimension needs to be stretched
@@ -1063,21 +1066,22 @@ def erode_3D_dt(array, search_distance, xy_scaling=1.0, z_scaling=1.0):
    # Resample the mask if needed
    if zoom_factor:
        array = ndimage.zoom(array, zoom_factor, order=0) # Use order=0 for binary masks
+    """

    print("Computing a distance transform for a perfect erosion...")

-    array = smart_dilate.compute_distance_transform_distance(array)
+    array = smart_dilate.compute_distance_transform_distance(array, sampling = [z_scaling, xy_scaling, xy_scaling])

    # Apply scaling factor
-    array = array * cardinal
+    #array = array * cardinal

    # Threshold the distance transform to get eroded result
    # For erosion, we keep only the points that are at least search_distance from the boundary
    array = array >= search_distance

    # Resample back to original dimensions if needed
-    if rev_factor:
-        array = ndimage.zoom(array, rev_factor, order=0) # Use order=0 for binary masks
+    #if rev_factor:
+        #array = ndimage.zoom(array, rev_factor, order=0) # Use order=0 for binary masks

    return array.astype(np.uint8)

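Note on dilate_3D_dt / erode_3D_dt: the same sampling change applies here, so both operations now threshold an anisotropy-aware distance transform instead of zooming the mask up and back down (the old resample blocks are commented out, along with the cardinal scaling factor that the physical-unit EDT makes redundant). A sketch of both operations, assuming a 3D binary array and a search distance in physical units:

import numpy as np
from scipy import ndimage

def dilate_dt(binary, search_distance, xy_scaling=1.0, z_scaling=1.0):
    # Distance from each background voxel to the nearest foreground voxel;
    # foreground itself gets distance 0, so it always survives the threshold.
    inv = ndimage.distance_transform_edt(
        binary == 0, sampling=[z_scaling, xy_scaling, xy_scaling])
    return (inv <= search_distance).astype(np.uint8)

def erode_dt(binary, search_distance, xy_scaling=1.0, z_scaling=1.0):
    # Distance from each foreground voxel to the boundary; keep only the
    # interior that is at least search_distance deep.
    dist = ndimage.distance_transform_edt(
        binary != 0, sampling=[z_scaling, xy_scaling, xy_scaling])
    return (dist >= search_distance).astype(np.uint8)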
@@ -2061,7 +2065,24 @@ def mask(image, mask, directory = None):

    mask = mask != 0

-
+    if len(image.shape) == 3:
+
+        image = image * mask
+    else:
+        # Split into separate color channels
+        channels = [image[..., i] for i in range(3)]
+        masked_channels = []
+
+        for image in channels:
+            # Upsample each channel separately
+            if len(image.shape) == 2:
+                np.expand_dims(image, axis = 0)
+            image = image * mask
+            masked_channels.append(image)
+
+        # Stack the channels back together
+        image = np.stack(masked_channels, axis=-1)
+

    if string_bool:
        if directory is None:
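Note on mask(): masking is now color-aware. A 3D grayscale volume is multiplied by the binary mask directly; color data is split into channels, each channel is masked, and the result is restacked on the last axis. A condensed sketch of the same logic:

import numpy as np

def apply_mask(image, mask):
    mask = mask != 0
    if image.ndim == 3:          # plain (Z, Y, X) volume
        return image * mask
    # (Z, Y, X, 3) color data: mask each channel, then restack on the last axis
    channels = [image[..., i] * mask for i in range(3)]
    return np.stack(channels, axis=-1)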
@@ -3363,16 +3384,17 @@ class Network_3D:
        :param skeletonize: (Optional - Val = False, boolean) - A boolean of whether to skeletonize the edges when using them.
        """

-
-
+        if directory is not None:
+            directory = encapsulate()

        self._xy_scale = xy_scale
        self._z_scale = z_scale

-
-
-
-
+        if directory is not None:
+            try:
+                self.save_scaling(directory)
+            except:
+                pass

        if search is None and ignore_search_region == False:
            search = 0
@@ -3383,19 +3405,23 @@ class Network_3D:
        self._nodes = nodes
        del nodes

+        if self._nodes.shape[0] == 1:
+            fast_dil = True #Set this to true because the 2D algo always uses the distance transform and doesnt need this special ver
+
        if label_nodes:
            self._nodes, num_nodes = label_objects(self._nodes)
        if other_nodes is not None:
            self.merge_nodes(other_nodes, label_nodes)

-
-
-
-
-
-
-
-
+        if directory is not None:
+            try:
+                self.save_nodes(directory)
+            except:
+                pass
+            try:
+                self.save_node_identities(directory)
+            except:
+                pass

        if not ignore_search_region:
            self.calculate_search_region(search, GPU = GPU, fast_dil = fast_dil, GPU_downsample = GPU_downsample)
@@ -3408,31 +3434,35 @@ class Network_3D:

        self.calculate_edges(edges, diledge = diledge, inners = inners, hash_inner_edges = hash_inners, search = search, remove_edgetrunk = remove_trunk, GPU = GPU, fast_dil = fast_dil, skeletonized = skeletonize)
        del edges
-
-
-
-
+        if directory is not None:
+            try:
+                self.save_edges(directory)
+            except:
+                pass

        self.calculate_network(search = search, ignore_search_region = ignore_search_region)

-
-
-
-
+        if directory is not None:
+            try:
+                self.save_network(directory)
+            except:
+                pass

        if self._nodes is None:
            self.load_nodes(directory)

        self.calculate_node_centroids(down_factor)
-
-
-
-
+        if directory is not None:
+            try:
+                self.save_node_centroids(directory)
+            except:
+                pass
        self.calculate_edge_centroids(down_factor)
-
-
-
-
+        if directory is not None:
+            try:
+                self.save_edge_centroids(directory)
+            except:
+                pass


    def draw_network(self, directory = None, down_factor = None, GPU = False):
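Note on the Network_3D changes: intermediate outputs (scaling, nodes, node identities, edges, network, centroids) are now written as soon as they are computed, but only when a directory was supplied, and every save is wrapped in its own try/except so a failed write cannot abort the rest of the pipeline. The method also forces fast_dil for single-slice (pseudo-2D) inputs, since the 2D path always uses the distance transform. The repeated guard boils down to a pattern like this (the helper below is illustrative, not part of the package):

def _maybe_save(save_fn, directory):
    if directory is None:
        return
    try:
        save_fn(directory)
    except Exception:
        pass  # a failed save should not stop the computation

# e.g. _maybe_save(network.save_nodes, directory)
#      _maybe_save(network.save_network, directory)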
@@ -4401,6 +4431,101 @@ class Network_3D:
        return neighborhood_dict, proportion_dict, title1, title2, densities


+    def get_ripley(self, root = None, targ = None, distance = 1, edgecorrect = True, bounds = None, ignore_dims = False, proportion = 0.5):
+
+        if root is None or targ is None: #Self clustering in this case
+            roots = self._node_centroids.values()
+            targs = self._node_centroids.values()
+        else:
+            roots = []
+            targs = []
+
+            for node, nodeid in self.node_identities.items(): #Otherwise we need to pull out this info
+                if nodeid == root:
+                    roots.append(self._node_centroids[node])
+                elif nodeid == targ:
+                    targs.append(self._node_centroids[node])
+
+        rooties = proximity.convert_centroids_to_array(roots, xy_scale = self.xy_scale, z_scale = self.z_scale)
+        targs = proximity.convert_centroids_to_array(roots, xy_scale = self.xy_scale, z_scale = self.z_scale)
+        points_array = np.vstack((rooties, targs))
+        del rooties
+
+        try:
+            if self.nodes.shape[0] == 1:
+                dim = 2
+            else:
+                dim = 3
+        except:
+            dim = 2
+            for centroid in self.node_centroids.values():
+                if centroid[0] != 0:
+                    dim = 3
+                    break
+
+
+        if ignore_dims:
+
+            factor = 0.25
+
+
+            if bounds is None:
+                if dim == 2:
+                    min_coords = np.array([0,0])
+                else:
+                    min_coords = np.array([0,0,0])
+                max_coords = np.max(points_array, axis=0)
+                max_coords = np.flip(max_coords)
+                bounds = (min_coords, max_coords)
+            else:
+                min_coords, max_coords = bounds
+
+            dim_list = max_coords - min_coords
+
+            new_list = []
+
+
+            if dim == 3:
+                for centroid in roots:
+
+                    if ((centroid[2] - min_coords[0]) > dim_list[0] * factor) and ((max_coords[0] - centroid[2]) > dim_list[0] * factor) and ((centroid[1] - min_coords[1]) > dim_list[1] * factor) and ((max_coords[1] - centroid[1]) > dim_list[1] * factor) and ((centroid[0] - min_coords[2]) > dim_list[2] * factor) and ((max_coords[2] - centroid[0]) > dim_list[2] * factor):
+                        new_list.append(centroid)
+                        #print(f"dim_list: {dim_list}, centroid: {centroid}, min_coords: {min_coords}, max_coords: {max_coords}")
+            else:
+                for centroid in roots:
+
+                    if ((centroid[2] - min_coords[0]) > dim_list[0] * factor) and ((max_coords[0] - centroid[2]) > dim_list[0] * factor) and ((centroid[1] - min_coords[1]) > dim_list[1] * factor) and ((max_coords[1] - centroid[1]) > dim_list[1] * factor):
+                        new_list.append(centroid)
+
+            roots = new_list
+            print(f"Utilizing {len(roots)} root points. Note that low n values are unstable.")
+            is_subset = True
+        else:
+            is_subset = False
+
+
+
+        roots = proximity.convert_centroids_to_array(roots, xy_scale = self.xy_scale, z_scale = self.z_scale)
+
+
+        if dim == 2:
+            roots = proximity.convert_augmented_array_to_points(roots)
+            targs = proximity.convert_augmented_array_to_points(targs)
+
+        r_vals = proximity.generate_r_values(points_array, distance, bounds = bounds, dim = dim, max_proportion=proportion)
+
+        k_vals = proximity.optimized_ripleys_k(roots, targs, r_vals, bounds=bounds, edge_correction=edgecorrect, dim = dim, is_subset = is_subset)
+
+        h_vals = proximity.compute_ripleys_h(k_vals, r_vals, dim)
+
+        proximity.plot_ripley_functions(r_vals, k_vals, h_vals, dim)
+
+        return r_vals, k_vals, h_vals
+
+
+

    #Morphological stats or network linking:

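Note on get_ripley(): this new method estimates Ripley's K for node centroids (self-clustering, or root identity vs. target identity), converts it to the variance-stabilized H function, and plots both; ignore_dims restricts root points to the interior of the volume (factor = 0.25 trimmed from each side per axis) as a crude alternative to edge correction. The heavy lifting lives in nettracer3d.proximity; below is a self-contained sketch of an uncorrected estimate, for point arrays already in physical units (not the package's implementation):

import numpy as np
from scipy.spatial import cKDTree

def ripleys_k(roots, targs, r_vals, volume):
    # K(r) = E[# targets within r of a root] / intensity, intensity = n/volume.
    # If roots and targs are the same point set, subtract the self-matches first.
    tree = cKDTree(targs)
    counts = np.array([sum(len(hits) for hits in tree.query_ball_point(roots, r))
                       for r in r_vals], dtype=float)
    return counts * volume / (len(roots) * len(targs))

def ripleys_h(k_vals, r_vals, dim):
    if dim == 2:
        l_vals = np.sqrt(k_vals / np.pi)                         # CSR: K = pi r^2
    else:
        l_vals = (3.0 * k_vals / (4.0 * np.pi)) ** (1.0 / 3.0)   # CSR: K = 4/3 pi r^3
    return l_vals - r_vals  # H > 0 suggests clustering at that scale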
@@ -4460,6 +4585,87 @@ class Network_3D:

        return array

+
+
+    def random_nodes(self, bounds = None, mask = None):
+
+        if self.nodes is not None:
+            try:
+                self.nodes = np.zeros_like(self.nodes)
+            except:
+                pass
+
+
+        if mask is not None:
+            coords = np.argwhere(mask != 0)
+        else:
+            if bounds is not None:
+                (z1, y1, x1), (z2, y2, x2) = bounds
+                z1, y1, x1 = int(z1), int(y1), int(x1)
+                z2, y2, x2 = int(z2), int(y2), int(x2)
+                z_range = np.arange(z1, z2 + 1)
+                y_range = np.arange(y1, y2 + 1)
+                x_range = np.arange(x1, x2 + 1)
+                z_grid, y_grid, x_grid = np.meshgrid(z_range, y_range, x_range, indexing='ij')
+                del z_range
+                del y_range
+                del x_range
+                coords = np.stack([z_grid.flatten(), y_grid.flatten(), x_grid.flatten()], axis=1)
+                del z_grid
+                del y_grid
+                del x_grid
+            else:
+                shape = ()
+                try:
+                    shape = self.nodes.shape
+                except:
+                    try:
+                        shape = self.edges.shape
+                    except:
+                        try:
+                            shape = self._network_overlay.shape
+                        except:
+                            try:
+                                shape = self._id_overlay.shape
+                            except:
+                                pass
+
+                ranges = [np.arange(s) for s in shape]
+
+                # Create meshgrid
+                mesh = np.meshgrid(*ranges, indexing='ij')
+                del ranges
+
+                # Stack and reshape
+                coords = np.stack(mesh, axis=-1).reshape(-1, len(shape))
+                del mesh
+
+        if len(coords) < len(self.node_centroids):
+            print(f"Warning: Only {len(coords)} positions available for {len(self.node_centroids)} labels")
+
+        new_centroids = {}
+
+        # Generate random indices without replacement
+        available_count = min(len(coords), len(self.node_centroids))
+        rand_indices = np.random.choice(len(coords), available_count, replace=False)
+
+        # Assign random positions to labels
+        for i, label in enumerate(self.node_centroids.keys()):
+            if i < len(rand_indices):
+                centroid = coords[rand_indices[i]]
+                new_centroids[label] = centroid
+                z, y, x = centroid
+                try:
+                    self.nodes[z, y, x] = label
+                except:
+                    pass
+
+        # Update the centroids dictionary
+        self.node_centroids = new_centroids
+
+        return self.node_centroids, self._nodes
+
+
    def community_id_info(self):
        def invert_dict(d):
            inverted = {}
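Note on random_nodes(): this new method scatters the existing node labels uniformly at random, without replacement, over a mask, a bounding box, or the full array extent, producing a null model that clustering measures such as get_ripley() can be compared against. The core draw reduces to a few lines (a sketch over an array shape; the shipped method also rebuilds self.nodes and handles masks and bounds):

import numpy as np

def scatter_centroids(centroids, shape, seed=None):
    rng = np.random.default_rng(seed)
    # Draw flat indices without replacement, then unravel to (z, y, x)
    flat = rng.choice(int(np.prod(shape)), size=len(centroids), replace=False)
    coords = np.column_stack(np.unravel_index(flat, shape))
    return {label: tuple(coords[i]) for i, label in enumerate(centroids)}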
@@ -4506,11 +4712,28 @@ class Network_3D:



-    def kd_network(self, distance = 100, targets = None):
+    def kd_network(self, distance = 100, targets = None, make_array = False):

-
+        centroids = copy.deepcopy(self._node_centroids)
+
+        if self._xy_scale == self._z_scale:
+            upsample = None
+            distance = distance/self._xy_scale # Account for scaling
+        else:
+            upsample = [self._xy_scale, self._z_scale] # This means resolutions have to be normalized
+            if self._xy_scale < self._z_scale:
+                distance = distance/self._xy_scale # We always upsample to normalize
+                refactor = self._z_scale/self._xy_scale
+                for node, centroid in centroids.items():
+                    centroids[node] = [centroid[0] * refactor, centroid[1], centroid[2]]
+            elif self._z_scale < self._xy_scale:
+                distance = distance/self._z_scale
+                refactor = self._xy_scale/self._z_scale
+                for node, centroid in centroids.items():
+                    centroids[node] = [centroid[0], centroid[1] * refactor, centroid[2] * refactor]

-
+
+        neighbors = proximity.find_neighbors_kdtree(distance, targets = targets, centroids = centroids)

        network = create_and_save_dataframe(neighbors)

@@ -4520,7 +4743,11 @@ class Network_3D:

        self.remove_edge_weights()

-
+        if make_array:
+
+            array = self.centroid_array()
+
+            return array


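Note on kd_network(): before the KD-tree radius query, centroids are now normalized for anisotropy by stretching the coarser axis up to the finer resolution, with the physical search distance divided by that finer scale; make_array=True additionally returns a centroid array. A sketch of the normalization plus query, using SciPy in place of the package's proximity.find_neighbors_kdtree:

import numpy as np
from scipy.spatial import cKDTree

def neighbors_within(centroids, distance, xy_scale=1.0, z_scale=1.0):
    labels = list(centroids)
    pts = np.array([centroids[k] for k in labels], dtype=float)  # (z, y, x)
    if xy_scale <= z_scale:
        pts[:, 0] *= z_scale / xy_scale   # stretch z to the finer xy resolution
        distance = distance / xy_scale    # physical distance -> voxel units
    else:
        pts[:, 1:] *= xy_scale / z_scale  # stretch y and x to the finer z resolution
        distance = distance / z_scale
    tree = cKDTree(pts)
    return [(labels[i], labels[j]) for i, j in tree.query_pairs(distance)]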