nettracer3d 0.8.0-py3-none-any.whl → 0.8.2-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- nettracer3d/cellpose_manager.py +161 -0
- nettracer3d/community_extractor.py +97 -20
- nettracer3d/neighborhoods.py +617 -81
- nettracer3d/nettracer.py +282 -74
- nettracer3d/nettracer_gui.py +860 -281
- nettracer3d/network_analysis.py +222 -230
- nettracer3d/node_draw.py +22 -12
- nettracer3d/proximity.py +254 -30
- nettracer3d-0.8.2.dist-info/METADATA +117 -0
- nettracer3d-0.8.2.dist-info/RECORD +24 -0
- nettracer3d-0.8.0.dist-info/METADATA +0 -83
- nettracer3d-0.8.0.dist-info/RECORD +0 -23
- {nettracer3d-0.8.0.dist-info → nettracer3d-0.8.2.dist-info}/WHEEL +0 -0
- {nettracer3d-0.8.0.dist-info → nettracer3d-0.8.2.dist-info}/entry_points.txt +0 -0
- {nettracer3d-0.8.0.dist-info → nettracer3d-0.8.2.dist-info}/licenses/LICENSE +0 -0
- {nettracer3d-0.8.0.dist-info → nettracer3d-0.8.2.dist-info}/top_level.txt +0 -0
nettracer3d/nettracer.py
CHANGED
@@ -348,6 +348,11 @@ def create_and_save_dataframe(pairwise_connections, excel_filename = None):

 #General supporting methods below:

+def invert_dict(d):
+    inverted = {}
+    for key, value in d.items():
+        inverted.setdefault(value, []).append(key)
+    return inverted

 def invert_array(array):
     """Internal method used to flip node array indices. 0 becomes 255 and vice versa."""
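This helper was previously duplicated as a nested function inside several methods; 0.8.2 defines it once at module level and deletes the copies (see the removals in later hunks). A minimal sketch of its behavior, with hypothetical data:

def invert_dict(d):
    inverted = {}
    for key, value in d.items():
        inverted.setdefault(value, []).append(key)
    return inverted

communities = {1: 'A', 2: 'B', 3: 'A'}  # hypothetical node -> community mapping
print(invert_dict(communities))         # {'A': [1, 3], 'B': [2]}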
@@ -1484,6 +1489,13 @@ def remove_zeros(input_list):
     return result_array


+def overlay_arrays_simple(edge_labels_1, edge_labels_2):
+    """
+    Superimpose edge_labels_2 on top of edge_labels_1 without any offset.
+    Where edge_labels_2 > 0, use those values directly.
+    """
+    mask = edge_labels_1 > 0
+    return np.where(mask, edge_labels_1, edge_labels_2)

 def combine_edges(edge_labels_1, edge_labels_2):
     """
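Note that the mask is computed from edge_labels_1, so the function as written keeps edge_labels_1 wherever it is nonzero and uses edge_labels_2 only to fill the remaining background; the docstring describes the opposite priority. A toy check with hypothetical 1-D label arrays:

import numpy as np

edge_labels_1 = np.array([0, 3, 0, 5])
edge_labels_2 = np.array([7, 7, 7, 0])

mask = edge_labels_1 > 0
print(np.where(mask, edge_labels_1, edge_labels_2))  # [7 3 7 5]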
@@ -1616,7 +1628,7 @@ def downsample(data, factor, directory=None, order=0):
     return data


-def otsu_binarize(image_array):
+def otsu_binarize(image_array, non_bool = False):

     """Automated binarize method for separating the foreground"""

@@ -1624,6 +1636,10 @@ def otsu_binarize(image_array):

     threshold = threshold_otsu(image_array)
     binary_mask = image_array > threshold
+
+    if non_bool:
+        binary_mask = binary_mask * 255
+
     return binary_mask

 def binarize(arrayimage, directory = None):
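With the new non_bool flag, the boolean mask is multiplied by 255, giving an integer 0/255 array that is convenient to save as an 8-bit image (note the result is a NumPy integer array, not uint8, unless cast). A minimal sketch, assuming a grayscale NumPy volume:

import numpy as np
from skimage.filters import threshold_otsu

image_array = np.random.randint(0, 256, size=(4, 64, 64))

threshold = threshold_otsu(image_array)
binary_mask = image_array > threshold   # default: boolean mask
eight_bit = binary_mask * 255           # non_bool=True: values 0 and 255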
@@ -1814,12 +1830,6 @@ def label_branches(array, peaks = 0, branch_removal = 0, comp_dil = 0, max_vol =

 def fix_branches_network(array, G, communities, fix_val = None):

-    def invert_dict(d):
-        inverted = {}
-        for key, value in d.items():
-            inverted.setdefault(value, []).append(key)
-        return inverted
-
     def get_degree_threshold(community_degrees):
         degrees = np.array(community_degrees, dtype=float)
         hist, bins = np.histogram(degrees, bins='auto')
@@ -3893,11 +3903,6 @@ class Network_3D:

     def com_to_node(self, targets = None):

-        def invert_dict(d):
-            inverted = {}
-            for key, value in d.items():
-                inverted.setdefault(value, []).append(key)
-            return inverted

         def update_array(array_3d, value_dict, targets = None):
             ref_array = copy.deepcopy(array_3d)
@@ -4291,7 +4296,7 @@ class Network_3D:



-    def get_degrees(self, down_factor = 1, directory = None, called = False, no_img = 0):
+    def get_degrees(self, down_factor = 1, directory = None, called = False, no_img = 0, heatmap = False):
         """
         Method to obtain information on the degrees of nodes in the network, also generating overlays that relate this information to the 3D structure.
         Overlays include a grayscale image where nodes are assigned a grayscale value corresponding to their degree, and a numerical index where numbers are drawn at nodes corresponding to their degree.
@@ -4301,6 +4306,27 @@ class Network_3D:
         :returns: A dictionary of degree values for each node.
         """

+        if heatmap:
+            import statistics
+            degrees_dict = {node: val for (node, val) in self.network.degree()}
+            pred = statistics.mean(list(degrees_dict.values()))
+
+            node_intensity = {}
+            import math
+            node_centroids = {}
+
+            for node in list(self.network.nodes()):
+                node_intensity[node] = math.log(self.network.degree(node)/pred)
+                node_centroids[node] = self.node_centroids[node]
+
+            from . import neighborhoods
+
+            overlay = neighborhoods.create_node_heatmap(node_intensity, node_centroids, shape = self.nodes.shape, is_3d=True, labeled_array = self.nodes)
+
+            return degrees_dict, overlay
+
+
+
         if down_factor > 1:
             centroids = self._node_centroids.copy()
             for item in self._node_centroids:
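The new heatmap branch scores each node by the log of its degree relative to the network's mean degree, so positive values mark hubs and negative values mark sparsely connected nodes (a degree-0 node would raise a ValueError from math.log). The arithmetic in isolation, with hypothetical degrees:

import math
import statistics

degrees_dict = {1: 2, 2: 4, 3: 8}              # hypothetical node degrees
pred = statistics.mean(degrees_dict.values())  # 4.67

node_intensity = {n: math.log(d / pred) for n, d in degrees_dict.items()}
# node 1 -> -0.85 (below average), node 3 -> +0.54 (above average)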
@@ -4657,18 +4683,16 @@ class Network_3D:
                 dim = 3
                 break

-
         if ignore_dims:

             factor = 0.25

+        big_array = proximity.convert_centroids_to_array(list(self.node_centroids.values()))

         if bounds is None:
-
-
-
-            min_coords = np.array([0,0,0])
-            max_coords = np.max(points_array, axis=0)
+            min_coords = np.array([0,0,0])
+            max_coords = [np.max(big_array[:, 0]), np.max(big_array[:, 1]), np.max(big_array[:, 2])]
+            del big_array
             max_coords = np.flip(max_coords)
             bounds = (min_coords, max_coords)
         else:
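The bounds fallback now takes per-column maxima of a freshly built centroid array in place of the previous np.max(points_array, axis=0). The equivalent NumPy operation, assuming (as the diff suggests) that centroids are stored as (z, y, x) rows and np.flip reverses the axis order for the checks that follow:

import numpy as np

big_array = np.array([[2, 10, 5],
                      [7, 3, 12]])  # hypothetical (z, y, x) centroids
min_coords = np.array([0, 0, 0])
max_coords = [big_array[:, 0].max(), big_array[:, 1].max(), big_array[:, 2].max()]
max_coords = np.flip(max_coords)    # [12 10 7]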
@@ -4683,11 +4707,16 @@ class Network_3D:

         new_list = []

+
         if dim == 3:
             for centroid in roots:

                 if ((centroid[2] - min_coords[0]) > dim_list[0] * factor) and ((max_coords[0] - centroid[2]) > dim_list[0] * factor) and ((centroid[1] - min_coords[1]) > dim_list[1] * factor) and ((max_coords[1] - centroid[1]) > dim_list[1] * factor) and ((centroid[0] - min_coords[2]) > dim_list[2] * factor) and ((max_coords[2] - centroid[0]) > dim_list[2] * factor):
                     new_list.append(centroid)
+
+
+                #if ((centroid[2] - min_coords[0]) > dim_list[0] * factor) and ((max_coords[0] - centroid[2]) > dim_list[0] * factor) and ((centroid[1] - min_coords[1]) > dim_list[1] * factor) and ((max_coords[1] - centroid[1]) > dim_list[1] * factor) and ((centroid[0] - min_coords[2]) > dim_list[2] * factor) and ((max_coords[2] - centroid[0]) > dim_list[2] * factor):
+                    #new_list.append(centroid)
                 #print(f"dim_list: {dim_list}, centroid: {centroid}, min_coords: {min_coords}, max_coords: {max_coords}")
         else:
             for centroid in roots:
@@ -4807,9 +4836,9 @@ class Network_3D:
         (z1, y1, x1), (z2, y2, x2) = bounds
         z1, y1, x1 = int(z1), int(y1), int(x1)
         z2, y2, x2 = int(z2), int(y2), int(x2)
-        z_range = np.arange(z1, z2 + 1)
-        y_range = np.arange(y1, y2 + 1)
-        x_range = np.arange(x1, x2 + 1)
+        z_range = np.arange(z1, z2 + 1 )
+        y_range = np.arange(y1, y2 + 1 )
+        x_range = np.arange(x1, x2 + 1 )
         z_grid, y_grid, x_grid = np.meshgrid(z_range, y_range, x_range, indexing='ij')
         del z_range
         del y_range
@@ -4872,11 +4901,6 @@ class Network_3D:

     def community_id_info(self):

-        def invert_dict(d):
-            inverted = {}
-            for key, value in d.items():
-                inverted.setdefault(value, []).append(key)
-            return inverted

         community_dict = invert_dict(self.communities)
         summation = 0
@@ -4914,13 +4938,7 @@ class Network_3D:

         return output

-    def community_id_info_per_com(self, umap = False, label = False):
-
-        def invert_dict(d):
-            inverted = {}
-            for key, value in d.items():
-                inverted.setdefault(value, []).append(key)
-            return inverted
+    def community_id_info_per_com(self, umap = False, label = False, limit = 0, proportional = False):

         community_dict = invert_dict(self.communities)
         summation = 0
@@ -4930,62 +4948,122 @@ class Network_3D:
             id_dict[iden] = i

         output = {}
+        umap_dict = {}

-
+        if not proportional:

-
+            for community in community_dict:

-
-            size = len(nodes)
+                counter = np.zeros(len(id_set))

-
-
-
+                nodes = community_dict[community]
+                size = len(nodes)
+
+                # Count identities in this community
+                for node in nodes:
+                    counter[id_dict[self.node_identities[node]]] += 1 # Keep them as arrays
+
+                for i in range(len(counter)): # Translate them into proportions out of 1
+
+                    counter[i] = counter[i]/size

-
+                output[community] = counter #Assign the finding here

-
+                if size >= limit:
+                    umap_dict[community] = counter
+
+        else:
+            idens = invert_dict(self.node_identities)
+            iden_count = {}
+            template = {}
+            node_count = len(list(self.communities.keys()))
+
+
+            for iden in id_set:
+                template[iden] = 0
+
+            for iden, nodes in idens.items():
+                iden_count[iden] = len(nodes)
+
+            for community in community_dict:
+
+                iden_tracker = copy.deepcopy(template)
+
+                nodes = community_dict[community]
+                size = len(nodes)
+                counter = np.zeros(len(id_set))
+
+                for node in nodes:
+                    iden_tracker[self.node_identities[node]] += 1
+
+                i = 0
+
+                if not umap: # External calls just get the proportion for now
+
+                    for iden, val in iden_tracker.items(): # Translate them into proportions of total number of that node of all nodes of that ID
+
+                        counter[i] = (val/iden_count[iden])
+                        i += 1
+
+                    output[community] = counter #Assign the finding here
+
+                    if size >= limit:
+                        umap_dict[community] = counter
+
+                else: # Internal calls for the umap get the relative proportion, demonstrating overrepresentation per community
+
+
+                    for iden, val in iden_tracker.items(): # Translate them into proportions of total number of that node of all nodes of that ID
+
+                        counter[i] = (val/iden_count[iden])/(size/node_count) # The proportion of that ID in the community vs all of that ID divided by the proportion of that community size vs all the nodes
+                        i += 1
+
+                    output[community] = counter #Assign the finding here
+
+                    if size >= limit:
+                        umap_dict[community] = counter

-            output[community] = counter #Assign the finding here

         if umap:
             from . import neighborhoods
-
+
+            neighborhoods.visualize_cluster_composition_umap(umap_dict, id_set, label = label)

         return output, id_set


-    def assign_neighborhoods(self, seed, count, limit = None, prev_coms = None):
+    def assign_neighborhoods(self, seed, count, limit = None, prev_coms = None, proportional = False, mode = 0):

         from . import neighborhoods

-        def invert_dict(d):
-            inverted = {}
-            for key, value in d.items():
-                inverted.setdefault(value, []).append(key)
-            return inverted
-
         if prev_coms is not None:
             self.communities = copy.deepcopy(prev_coms)

         identities, _ = self.community_id_info_per_com()

+        zero_group = {}
+
+
         if limit is not None:

             coms = invert_dict(self.communities)

-            zero_group = {}

             for com, nodes in coms.items():

                 if len(nodes) < limit:

-                    zero_group[com] = 0
-
                     del identities[com]

+        if count > len(identities):
+            print(f"Requested neighborhoods too large for available communities. Using {len(identities)} neighborhoods (max for these coms)")
+            count = len(identities)
+

-
+        if mode == 0:
+            clusters = neighborhoods.cluster_arrays(identities, count, seed = seed)
+        elif mode == 1:
+            clusters = neighborhoods.cluster_arrays_dbscan(identities, seed = seed)

         coms = {}

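In the proportional branch, each entry is the community's share of all nodes of one identity, and for the UMAP path that share is divided by the community's share of all nodes, so 1.0 means the identity is represented exactly at chance. Worked numbers (hypothetical):

val, iden_count_A = 4, 20     # 4 of the 20 type-'A' nodes fall in this community
size, node_count = 10, 100    # the community holds 10 of 100 nodes

share_of_identity = val / iden_count_A          # 0.2
community_share = size / node_count             # 0.1
relative = share_of_identity / community_share  # 2.0 -> 'A' is 2x overrepresented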
@@ -4997,19 +5075,60 @@ class Network_3D:

                 coms[com] = i + 1

-
-
+        copy_dict = copy.deepcopy(self.communities)
+
+        for node, com in copy_dict.items():
+
+            try:

-
+                self.communities[node] = coms[com]
+
+            except:
+                del self.communities[node]
+                zero_group[node] = 0
+
+        self.com_by_size()
+
+
+        if len(zero_group) > 0:
+            self.communities.update(zero_group)

-                self.communities[node] = coms[com]

         identities, id_set = self.community_id_info_per_com()

-
+        len_dict = {}
+
+        coms = invert_dict(self.communities)
+        node_count = len(list(self.communities.keys()))
+
+        for com, nodes in coms.items():
+
+            len_dict[com] = len(nodes)/node_count
+
+        matrixes = []
+
+        output = neighborhoods.plot_dict_heatmap(identities, id_set, title = "Neighborhood Heatmap by Proportional Composition Per Neighborhood")
+
+        matrixes.append(output)
+
+        if proportional:
+
+            identities2, id_set2 = self.community_id_info_per_com(proportional = True)
+            output = neighborhoods.plot_dict_heatmap(identities2, id_set2, title = "Neighborhood Heatmap by Proportional Composition of Nodes in Neighborhood vs All Nodes")
+            matrixes.append(output)
+
+            identities3 = {}
+            for iden in identities2:
+                identities3[iden] = identities2[iden]/len_dict[iden]
+
+            output = neighborhoods.plot_dict_heatmap(identities3, id_set2, title = "Neighborhood Heatmap by Proportional Composition of Nodes in Neighborhood vs All Nodes Divided by Neighborhood Total Proportion of All Nodes (val < 1 = underrepresented, val > 1 = overrepresented)", center_at_one = True)
+            matrixes.append(output)
+
+        return len_dict, matrixes, id_set


-
+
+    def kd_network(self, distance = 100, targets = None, make_array = False, max_neighbors = None):

         centroids = copy.deepcopy(self._node_centroids)

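A hedged usage sketch of the new parameters; `net` is assumed to be a populated Network_3D with communities and node identities already assigned. With mode=0, the community composition vectors are clustered into `count` neighborhoods via neighborhoods.cluster_arrays; with mode=1, neighborhoods.cluster_arrays_dbscan picks the number of clusters itself, so `count` only applies to the mode-0 path:

len_dict, matrixes, id_set = net.assign_neighborhoods(
    seed=42,            # passed through to the clustering backend
    count=5,            # desired neighborhood count (mode 0 only)
    limit=10,           # communities smaller than this are folded into group 0
    proportional=True,  # also plot the over/under-representation heatmaps
    mode=0,             # 0 = cluster_arrays, 1 = cluster_arrays_dbscan
)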
@@ -5030,14 +5149,20 @@
             centroids[node] = [centroid[0], centroid[1] * refactor, centroid[2] * refactor]


-        neighbors = proximity.find_neighbors_kdtree(distance, targets = targets, centroids = centroids)
+        neighbors = proximity.find_neighbors_kdtree(distance, targets = targets, centroids = centroids, max_neighbors = max_neighbors)
+
+        print("Creating Dataframe")

         network = create_and_save_dataframe(neighbors)

+        print("Converting df to network")
+
         self._network_lists = network_analysis.read_excel_to_lists(network)

         #self._network is a networkx graph that stores the connections

+        print("Removing Edge Weights")
+
         self.remove_edge_weights()

         if make_array:
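Usage sketch for the new max_neighbors cap (assumed: `net` is a Network_3D with node centroids set); the added prints simply mark the three slow stages of the pipeline:

# Connect every pair of centroids within 50 units, but keep at most the
# 5 closest neighbors per node (new max_neighbors parameter).
net.kd_network(distance=50, max_neighbors=5)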
@@ -5048,7 +5173,7 @@

     def community_cells(self, size = 32, xy_scale = 1, z_scale = 1):

-        def
+        def revert_dict(d):
             inverted = {}
             for key, value_list in d.items():
                 for value in value_list:
@@ -5066,18 +5191,12 @@

         com_dict = proximity.partition_objects_into_cells(self.node_centroids, (size_z, size_x, size_x))

-        self.communities =
+        self.communities = revert_dict(com_dict)

-    def community_heatmap(self, num_nodes = None, is3d = True):
+    def community_heatmap(self, num_nodes = None, is3d = True, numpy = False):

         import math

-        def invert_dict(d):
-            inverted = {}
-            for key, value in d.items():
-                inverted.setdefault(value, []).append(key)
-            return inverted
-
         if num_nodes == None:

             try:
@@ -5106,10 +5225,21 @@
         for com, nodes in coms.items():
             heat_dict[com] = math.log(len(nodes)/rand_dens)

+        try:
+            shape = self.nodes.shape
+        except:
+            big_array = proximity.convert_centroids_to_array(list(self.node_centroids.values()))
+            shape = [np.max(big_array[0, :]) + 1, np.max(big_array[1, :]) + 1, np.max(big_array[2, :]) + 1]
+
         from . import neighborhoods
-
+        if not numpy:
+            neighborhoods.create_community_heatmap(heat_dict, self.communities, self.node_centroids, shape = shape, is_3d=is3d)
+
+            return heat_dict
+        else:
+            overlay = neighborhoods.create_community_heatmap(heat_dict, self.communities, self.node_centroids, shape = shape, is_3d=is3d, labeled_array = self.nodes)
+            return heat_dict, overlay

-        return heat_dict

     def merge_node_ids(self, path, data):

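The new numpy flag controls whether the method only plots or also returns a labeled overlay volume built from self.nodes; the new try/except falls back to inferring the shape from centroids when no node array is loaded. A usage sketch under the same assumptions as above:

heat_dict = net.community_heatmap(is3d=True)                       # plot only
heat_dict, overlay = net.community_heatmap(is3d=True, numpy=True)  # plot + volume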
@@ -5161,6 +5291,84 @@ class Network_3D:
             pass


+    def nearest_neighbors_avg(self, root, targ, xy_scale = 1, z_scale = 1, num = 1, heatmap = False, threed = True, numpy = False):
+
+        root_set = []
+
+        compare_set = []
+
+        if root is None:
+
+            root_set = list(self.node_centroids.keys())
+            compare_set = root_set
+            title = "Nearest Neighbors Between Nodes Heatmap"
+
+        else:
+
+            title = f"Nearest Neighbors of ID {targ} from ID {root} Heatmap"
+
+            for node, iden in self.node_identities.items():
+
+                if iden == root:
+
+                    root_set.append(node)
+
+                elif (iden == targ) or (targ == 'All Others (Excluding Self)'):
+
+                    compare_set.append(node)
+
+        if root == targ:
+
+            compare_set = root_set
+            if len(compare_set) - 1 < num:
+
+                num = len(compare_set) - 1
+
+                print(f"Error: Not enough neighbor nodes for requested number of neighbors. Using max available neighbors: {num}")
+
+
+        if len(compare_set) < num:
+
+            num = len(compare_set)
+
+            print(f"Error: Not enough neighbor nodes for requested number of neighbors. Using max available neighbors: {num}")
+
+        avg, output = proximity.average_nearest_neighbor_distances(self.node_centroids, root_set, compare_set, xy_scale=self.xy_scale, z_scale=self.z_scale, num = num)
+
+        if heatmap:
+
+
+            from . import neighborhoods
+            try:
+                shape = self.nodes.shape
+            except:
+                big_array = proximity.convert_centroids_to_array(list(self.node_centroids.values()))
+                shape = [np.max(big_array[0, :]) + 1, np.max(big_array[1, :]) + 1, np.max(big_array[2, :]) + 1]
+
+            pred = avg
+
+            node_intensity = {}
+            import math
+            node_centroids = {}
+
+            for node in root_set:
+                node_intensity[node] = math.log(pred/output[node])
+                node_centroids[node] = self.node_centroids[node]
+
+            if numpy:
+
+                overlay = neighborhoods.create_node_heatmap(node_intensity, node_centroids, shape = shape, is_3d=threed, labeled_array = self.nodes, colorbar_label="Clustering Intensity", title = title)
+
+                return avg, output, overlay
+
+            else:
+                neighborhoods.create_node_heatmap(node_intensity, node_centroids, shape = shape, is_3d=threed, labeled_array = None, colorbar_label="Clustering Intensity", title = title)
+
+                return avg, output
+
+
+
+


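A hedged usage sketch of the new method; the identity labels are hypothetical. Per-node intensities are log(avg / distance), so nodes closer than the average to their neighbors score positive (clustered) and farther ones negative. Note that, as shown in the diff, values are returned only on the heatmap path:

# Mean distance from each 'tumor' node to its 3 nearest 'vessel' nodes.
avg, per_node = net.nearest_neighbors_avg('tumor', 'vessel', num=3, heatmap=True)

# numpy=True also returns an overlay volume rendered from self.nodes.
avg, per_node, overlay = net.nearest_neighbors_avg(
    'tumor', 'vessel', num=3, heatmap=True, numpy=True)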