nettracer3d 0.7.9 → 0.8.1 (py3-none-any.whl)
This diff compares the publicly released contents of the two package versions as they appear in their public registry and is provided for informational purposes only.
- nettracer3d/community_extractor.py +17 -26
- nettracer3d/neighborhoods.py +395 -58
- nettracer3d/nettracer.py +230 -39
- nettracer3d/nettracer_gui.py +1195 -202
- nettracer3d/node_draw.py +22 -12
- nettracer3d/proximity.py +83 -6
- nettracer3d/segmenter.py +1 -1
- nettracer3d/segmenter_GPU.py +1 -1
- nettracer3d/simple_network.py +43 -25
- {nettracer3d-0.7.9.dist-info → nettracer3d-0.8.1.dist-info}/METADATA +5 -3
- nettracer3d-0.8.1.dist-info/RECORD +23 -0
- nettracer3d-0.7.9.dist-info/RECORD +0 -23
- {nettracer3d-0.7.9.dist-info → nettracer3d-0.8.1.dist-info}/WHEEL +0 -0
- {nettracer3d-0.7.9.dist-info → nettracer3d-0.8.1.dist-info}/entry_points.txt +0 -0
- {nettracer3d-0.7.9.dist-info → nettracer3d-0.8.1.dist-info}/licenses/LICENSE +0 -0
- {nettracer3d-0.7.9.dist-info → nettracer3d-0.8.1.dist-info}/top_level.txt +0 -0
nettracer3d/nettracer.py
CHANGED
@@ -548,6 +548,7 @@ def remove_branches(skeleton, length):
     return image_copy
 
 
+
 def estimate_object_radii(labeled_array, gpu=False, n_jobs=None, xy_scale = 1, z_scale = 1):
     """
     Estimate the radii of labeled objects in a 3D numpy array.
@@ -1485,21 +1486,21 @@ def remove_zeros(input_list):
 
 
 def combine_edges(edge_labels_1, edge_labels_2):
-    """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    """
+    let NumPy handle promotion automatically
+    """
+    # Early exit if no combination needed
+    mask = (edge_labels_1 == 0) & (edge_labels_2 > 0)
+    if not np.any(mask):
+        return edge_labels_1.copy()
+
+    max_val = np.max(edge_labels_1)
+
+    # Let NumPy handle dtype promotion automatically
+    # This will promote to the smallest type that can handle the operation
+    offset_labels = edge_labels_2 + max_val
+
+    return np.where(mask, offset_labels, edge_labels_1)
 
 def combine_nodes(root_nodes, other_nodes, other_ID, identity_dict, root_ID = None):
 
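The rewritten combine_edges replaces the removed multi-step bookkeeping with one boolean mask and a single np.where. A minimal standalone sketch of the same pattern, with an illustrative function name and toy arrays that are not from the package:

import numpy as np

def combine_edges_sketch(edge_labels_1, edge_labels_2):
    # Fill background voxels of the first label image with labels from the second,
    # offset past the first image's maximum so label IDs never collide.
    mask = (edge_labels_1 == 0) & (edge_labels_2 > 0)
    if not np.any(mask):
        return edge_labels_1.copy()
    offset_labels = edge_labels_2 + np.max(edge_labels_1)  # NumPy promotes the dtype as needed
    return np.where(mask, offset_labels, edge_labels_1)

a = np.array([[1, 0], [0, 2]])
b = np.array([[5, 3], [0, 0]])
print(combine_edges_sketch(a, b))  # [[1 5] [0 2]] -- label 3 becomes 5 after the +2 offset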
@@ -1507,15 +1508,10 @@ def combine_nodes(root_nodes, other_nodes, other_ID, identity_dict, root_ID = None):
 
     print("Combining node arrays")
 
-
-
-
-
-    root_bools = root_nodes == 0 #Get boolean mask where root nodes do not exist.
-    other_bools = other_nodes > 0 #Get boolean mask where other nodes exist.
-    other_nodes = other_nodes + max_val #Add the maximum root node labels to other nodes so the two can be merged without overriding eachother
-    other_nodes = other_nodes * other_bools #Eliminate any indices that should be 0 from other_nodes.
-    other_nodes = other_nodes * root_bools #Eliminate any indices where other nodes overlap root nodes (root node are giving overlap priority)
+    mask = (root_nodes == 0) & (other_nodes > 0)
+    if np.any(mask):
+        max_val = np.max(root_nodes)
+        other_nodes[:] = np.where(mask, other_nodes + max_val, 0)
 
     if root_ID is not None:
         rootIDs = list(np.unique(root_nodes)) #Sets up adding these vals to the identitiy dictionary. Gets skipped if this has already been done.
@@ -1530,6 +1526,11 @@ def combine_nodes(root_nodes, other_nodes, other_ID, identity_dict, root_ID = None):
 
     if root_ID is not None: #Adds the root vals to the dictionary if it hasn't already
 
+        if other_ID.endswith('.tiff'):
+            other_ID = other_ID[:-5]
+        elif other_ID.endswith('.tif'):
+            other_ID = other_ID[:-4]
+
         for item in rootIDs:
             identity_dict[item] = root_ID
 
@@ -1538,6 +1539,11 @@ def combine_nodes(root_nodes, other_nodes, other_ID, identity_dict, root_ID = None):
             other_ID = os.path.basename(other_ID)
         except:
             pass
+        if other_ID.endswith('.tiff'):
+            other_ID = other_ID[:-5]
+        elif other_ID.endswith('.tif'):
+            other_ID = other_ID[:-4]
+
         identity_dict[item] = other_ID
 
     nodes = root_nodes + other_nodes #Combine the outer edges with the inner edges modified via the above steps
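Both new blocks in combine_nodes trim a trailing .tif or .tiff so identity labels stay clean. A small illustrative helper (not part of the package) showing the same check, with os.path.splitext as a near equivalent for single-suffix names:

import os

def strip_tif_suffix(name):
    # Mirrors the inserted endswith checks: drop a trailing .tiff or .tif.
    if name.endswith('.tiff'):
        return name[:-5]
    elif name.endswith('.tif'):
        return name[:-4]
    return name

print(strip_tif_suffix('vessels.tif'))     # vessels
print(strip_tif_suffix('nuclei.tiff'))     # nuclei
print(os.path.splitext('vessels.tif')[0])  # vessels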
@@ -1609,6 +1615,17 @@ def downsample(data, factor, directory=None, order=0):
 
     return data
 
+
+def otsu_binarize(image_array):
+
+    """Automated binarize method for seperating the foreground"""
+
+    from skimage.filters import threshold_otsu
+
+    threshold = threshold_otsu(image_array)
+    binary_mask = image_array > threshold
+    return binary_mask
+
 def binarize(arrayimage, directory = None):
     """
     Can be used to binarize an image. Binary output will be saved to the active directory if none is specified.
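The new otsu_binarize is a thin wrapper around skimage.filters.threshold_otsu. A usage sketch on synthetic data (the array values are invented for illustration):

import numpy as np
from skimage.filters import threshold_otsu

# Synthetic grayscale stack: dim background plus a brighter foreground block.
rng = np.random.default_rng(0)
img = rng.normal(20, 5, (8, 64, 64))
img[:, 16:48, 16:48] += 60

threshold = threshold_otsu(img)       # the same call otsu_binarize wraps
binary_mask = img > threshold         # boolean foreground mask
print(threshold, binary_mask.mean())  # threshold value and foreground fraction (~0.25 here)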
@@ -3428,7 +3445,7 @@ class Network_3D:
         """
         self._nodes, num_nodes = label_objects(nodes, structure_3d)
 
-    def merge_nodes(self, addn_nodes_name, label_nodes = True):
+    def merge_nodes(self, addn_nodes_name, label_nodes = True, root_id = "Root_Nodes"):
         """
         Merges the self._nodes attribute with alternate labelled node images. The alternate nodes can be inputted as a string for a filepath to a tif,
         or as a directory address containing only tif images, which will merge the _nodes attribute with all tifs in the folder. The _node_identities attribute
@@ -3439,8 +3456,13 @@ class Network_3D:
         :param label_nodes: (Optional - Val = True; Boolean). Will label all discrete objects in each node file being merged if True. If False, will not label.
         """
 
-        nodes_name =
+        nodes_name = root_id
 
+        try:
+            nodes_name = os.path.splitext(os.path.basename(nodes_name))[0]
+        except:
+            pass
+
         identity_dict = {} #A dictionary to deliniate the node identities
 
         try: #Try presumes the input is a tif
@@ -3462,7 +3484,10 @@ class Network_3D:
             for i, addn_nodes in enumerate(addn_nodes_list):
                 try:
                     addn_nodes_ID = addn_nodes
-
+                    try:
+                        addn_nodes = tifffile.imread(f'{addn_nodes_name}/{addn_nodes}')
+                    except:
+                        continue
 
                     if label_nodes is True:
                         addn_nodes, num_nodes2 = label_objects(addn_nodes) # Label the node objects. Note this presumes no overlap between node masks.
@@ -4266,7 +4291,7 @@ class Network_3D:
 
 
 
-    def get_degrees(self, down_factor = 1, directory = None, called = False, no_img = 0):
+    def get_degrees(self, down_factor = 1, directory = None, called = False, no_img = 0, heatmap = False):
         """
         Method to obtain information on the degrees of nodes in the network, also generating overlays that relate this information to the 3D structure.
         Overlays include a grayscale image where nodes are assigned a grayscale value corresponding to their degree, and a numerical index where numbers are drawn at nodes corresponding to their degree.
@@ -4276,6 +4301,27 @@ class Network_3D:
         :returns: A dictionary of degree values for each node.
         """
 
+        if heatmap:
+            import statistics
+            degrees_dict = {node: val for (node, val) in self.network.degree()}
+            pred = statistics.mean(list(degrees_dict.values()))
+
+            node_intensity = {}
+            import math
+            node_centroids = {}
+
+            for node in list(self.network.nodes()):
+                node_intensity[node] = math.log(self.network.degree(node)/pred)
+                node_centroids[node] = self.node_centroids[node]
+
+            from . import neighborhoods
+
+            overlay = neighborhoods.create_node_heatmap(node_intensity, node_centroids, shape = self.nodes.shape, is_3d=True, labeled_array = self.nodes)
+
+            return degrees_dict, overlay
+
+
+
         if down_factor > 1:
             centroids = self._node_centroids.copy()
             for item in self._node_centroids:
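The new heatmap branch in get_degrees scores each node as the log ratio of its degree to the mean degree before handing the values to neighborhoods.create_node_heatmap. The scoring itself only needs networkx and the standard library; a sketch on a toy graph (the graph choice is illustrative):

import math
import statistics
import networkx as nx

G = nx.krackhardt_kite_graph()  # stand-in for Network_3D.network

degrees = {node: deg for node, deg in G.degree()}
mean_degree = statistics.mean(degrees.values())

# Same intensity the diff assigns: hubs come out positive, sparse nodes negative.
intensity = {node: math.log(deg / mean_degree) for node, deg in degrees.items()}
print(intensity)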
@@ -4632,24 +4678,27 @@ class Network_3D:
                 dim = 3
                 break
 
-
         if ignore_dims:
 
             factor = 0.25
 
+            big_array = proximity.convert_centroids_to_array(list(self.node_centroids.values()))
 
             if bounds is None:
-
-
-
-                min_coords = np.array([0,0,0])
-                max_coords = np.max(points_array, axis=0)
+                min_coords = np.array([0,0,0])
+                max_coords = [np.max(big_array[:, 0]), np.max(big_array[:, 1]), np.max(big_array[:, 2])]
+                del big_array
                 max_coords = np.flip(max_coords)
                 bounds = (min_coords, max_coords)
             else:
                 min_coords, max_coords = bounds
 
-
+            try:
+                dim_list = max_coords - min_coords
+            except:
+                min_coords = np.array([0,0,0])
+                bounds = (min_coords, max_coords)
+                dim_list = max_coords - min_coords
 
             new_list = []
@@ -4659,6 +4708,10 @@ class Network_3D:
 
                 if ((centroid[2] - min_coords[0]) > dim_list[0] * factor) and ((max_coords[0] - centroid[2]) > dim_list[0] * factor) and ((centroid[1] - min_coords[1]) > dim_list[1] * factor) and ((max_coords[1] - centroid[1]) > dim_list[1] * factor) and ((centroid[0] - min_coords[2]) > dim_list[2] * factor) and ((max_coords[2] - centroid[0]) > dim_list[2] * factor):
                     new_list.append(centroid)
+
+
+                #if ((centroid[2] - min_coords[0]) > dim_list[0] * factor) and ((max_coords[0] - centroid[2]) > dim_list[0] * factor) and ((centroid[1] - min_coords[1]) > dim_list[1] * factor) and ((max_coords[1] - centroid[1]) > dim_list[1] * factor) and ((centroid[0] - min_coords[2]) > dim_list[2] * factor) and ((max_coords[2] - centroid[0]) > dim_list[2] * factor):
+                    #new_list.append(centroid)
 
                 #print(f"dim_list: {dim_list}, centroid: {centroid}, min_coords: {min_coords}, max_coords: {max_coords}")
         else:
             for centroid in roots:
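These two hunks derive the analysis bounds from the centroid array and then discard centroids sitting within 25% of any border, an edge guard ahead of the Ripley's K/H computation. A vectorized sketch of the same interior filter, using one consistent axis order and skipping the np.flip bookkeeping (coordinates are toy values):

import numpy as np

# Toy centroids in (z, y, x) order, standing in for node_centroids values.
centroids = np.array([[2, 50, 50], [1, 5, 90], [3, 80, 20], [2, 48, 52]])

factor = 0.25
min_coords = np.zeros(3)
max_coords = centroids.max(axis=0)   # per-axis extent, like the big_array maxima
dims = max_coords - min_coords

# Keep only centroids at least `factor` of each dimension away from every border,
# which is what the long per-axis comparison checks element by element.
far_from_low = centroids - min_coords > dims * factor
far_from_high = max_coords - centroids > dims * factor
interior = centroids[np.all(far_from_low & far_from_high, axis=1)]
print(interior)  # the two central points survive; the edge-hugging ones are dropped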
@@ -4688,7 +4741,7 @@ class Network_3D:
 
         h_vals = proximity.compute_ripleys_h(k_vals, r_vals, dim)
 
-        proximity.plot_ripley_functions(r_vals, k_vals, h_vals, dim)
+        proximity.plot_ripley_functions(r_vals, k_vals, h_vals, dim, root, targ)
 
         return r_vals, k_vals, h_vals
 
@@ -5039,7 +5092,7 @@ class Network_3D:
 
         self.communities = invert_dict(com_dict)
 
-    def community_heatmap(self, num_nodes = None, is3d = True):
+    def community_heatmap(self, num_nodes = None, is3d = True, numpy = False):
 
         import math
 
@@ -5077,10 +5130,148 @@ class Network_3D:
         for com, nodes in coms.items():
             heat_dict[com] = math.log(len(nodes)/rand_dens)
 
+        try:
+            shape = self.nodes.shape
+        except:
+            big_array = proximity.convert_centroids_to_array(list(self.node_centroids.values()))
+            shape = [np.max(big_array[0, :]) + 1, np.max(big_array[1, :]) + 1, np.max(big_array[2, :]) + 1]
+
         from . import neighborhoods
-
+        if not numpy:
+            neighborhoods.create_community_heatmap(heat_dict, self.communities, self.node_centroids, shape = shape, is_3d=is3d)
+
+            return heat_dict
+        else:
+            overlay = neighborhoods.create_community_heatmap(heat_dict, self.communities, self.node_centroids, shape = shape, is_3d=is3d, labeled_array = self.nodes)
+            return heat_dict, overlay
+
+
+    def merge_node_ids(self, path, data):
+
+        if self.node_identities is None: # Prepare modular dict
+
+            self.node_identities = {}
+
+            nodes = list(np.unique(data))
+            if 0 in nodes:
+                del nodes[0]
+
+            for node in nodes:
+                self.node_identities[node] = ''
+
+        img_list = directory_info(path)
+
+        for i, img in enumerate(img_list):
+            mask = tifffile.imread(f'{path}/{img}')
+
+            if len(np.unique(mask)) != 2:
+
+                mask = otsu_binarize(mask)
+
+            nodes = data * mask
+            nodes = list(np.unique(nodes))
+            if 0 in nodes:
+                del nodes[0]
+
+            if img.endswith('.tiff'):
+                base_name = img[:-5]
+            elif img.endswith('.tif'):
+                base_name = img[:-4]
+            else:
+                base_name = img
+
+            for node in self.node_identities.keys():
+
+                try:
+
+                    if node in nodes:
+
+                        self.node_identities[node] += f" {base_name}+"
+
+                    else:
+
+                        self.node_identities[node] += f" {base_name}-"
+
+                except:
+                    pass
+
+
+    def nearest_neighbors_avg(self, root, targ, xy_scale = 1, z_scale = 1, num = 1, heatmap = False, threed = True, numpy = False):
+
+        root_set = []
+
+        compare_set = []
+
+        if root is None:
+
+            root_set = list(self.node_centroids.keys())
+            compare_set = root_set
+            title = "Nearest Neighbors Between Nodes Heatmap"
+
+        else:
+
+            title = f"Nearest Neighbors of ID {targ} from ID {root} Heatmap"
+
+            for node, iden in self.node_identities.items():
+
+                if iden == root:
+
+                    root_set.append(node)
+
+                elif (iden == targ) or (targ == 'All Others (Excluding Self)'):
+
+                    compare_set.append(node)
+
+        if root == targ:
+
+            compare_set = root_set
+            if len(compare_set) - 1 < num:
+
+                print("Error: Not enough neighbor nodes for requested number of neighbors")
+                return
+
+        if len(compare_set) < num:
+
+            print("Error: Not enough neighbor nodes for requested number of neighbors")
+            return
+
+        avg, output = proximity.average_nearest_neighbor_distances(self.node_centroids, root_set, compare_set, xy_scale=self.xy_scale, z_scale=self.z_scale, num = num)
+
+        if heatmap:
+
+
+            from . import neighborhoods
+            try:
+                shape = self.nodes.shape
+            except:
+                big_array = proximity.convert_centroids_to_array(list(self.node_centroids.values()))
+                shape = [np.max(big_array[0, :]) + 1, np.max(big_array[1, :]) + 1, np.max(big_array[2, :]) + 1]
+
+            pred = avg
+
+            node_intensity = {}
+            import math
+            node_centroids = {}
+
+            for node in root_set:
+                node_intensity[node] = math.log(pred/output[node])
+                node_centroids[node] = self.node_centroids[node]
+
+            if numpy:
+
+                overlay = neighborhoods.create_node_heatmap(node_intensity, node_centroids, shape = shape, is_3d=threed, labeled_array = self.nodes, colorbar_label="Clustering Intensity", title = title)
+
+                return avg, output, overlay
+
+            else:
+                neighborhoods.create_node_heatmap(node_intensity, node_centroids, shape = shape, is_3d=threed, labeled_array = None, colorbar_label="Clustering Intensity", title = title)
+
+                return avg, output
+
+
+
 
-        return heat_dict
 
 
 
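The new nearest_neighbors_avg delegates the distance work to proximity.average_nearest_neighbor_distances and then maps log(average / per-node distance) onto a heatmap. The distance step can be approximated with a KD-tree; the sketch below is an assumption about that behavior (the function name and same-set handling are illustrative, and scaling by xy_scale/z_scale is omitted), not the package's actual implementation:

import numpy as np
from scipy.spatial import cKDTree

def mean_nearest_neighbor_distance(root_pts, compare_pts, num=1, same_set=False):
    # For each root point, average the distances to its `num` nearest compare points.
    # When both sets are identical, drop the zero-distance self match.
    tree = cKDTree(compare_pts)
    k = num + 1 if same_set else num
    dists, _ = tree.query(root_pts, k=k)
    dists = np.asarray(dists).reshape(len(root_pts), -1)
    if same_set:
        dists = dists[:, 1:]
    per_point = dists.mean(axis=1)
    return per_point.mean(), per_point

pts = np.array([[0, 0, 0], [0, 0, 1], [0, 3, 0], [4, 0, 0]], dtype=float)
avg, per_point = mean_nearest_neighbor_distance(pts, pts, num=1, same_set=True)
print(avg, per_point)  # 2.25 [1. 1. 3. 4.]

For anisotropic voxels, the centroids would be multiplied by (z_scale, xy_scale, xy_scale) before the tree is built, which is presumably what the package's xy_scale/z_scale arguments account for.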