nettracer3d 0.6.0__py3-none-any.whl → 0.6.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nettracer3d/nettracer.py +0 -23
- nettracer3d/nettracer_gui.py +211 -21
- nettracer3d/segmenter.py +670 -54
- {nettracer3d-0.6.0.dist-info → nettracer3d-0.6.1.dist-info}/METADATA +7 -3
- {nettracer3d-0.6.0.dist-info → nettracer3d-0.6.1.dist-info}/RECORD +9 -10
- nettracer3d/hub_getter.py +0 -248
- {nettracer3d-0.6.0.dist-info → nettracer3d-0.6.1.dist-info}/LICENSE +0 -0
- {nettracer3d-0.6.0.dist-info → nettracer3d-0.6.1.dist-info}/WHEEL +0 -0
- {nettracer3d-0.6.0.dist-info → nettracer3d-0.6.1.dist-info}/entry_points.txt +0 -0
- {nettracer3d-0.6.0.dist-info → nettracer3d-0.6.1.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: nettracer3d
-Version: 0.6.0
+Version: 0.6.1
 Summary: Scripts for intializing and analyzing networks from segmentations of three dimensional images.
 Author-email: Liam McLaughlin <mclaughlinliam99@gmail.com>
 Project-URL: User_Tutorial, https://www.youtube.com/watch?v=cRatn5VTWDY
@@ -44,6 +44,10 @@ NetTracer3D is free to use/fork for academic/nonprofit use so long as citation i

 NetTracer3D was developed by Liam McLaughlin while working under Dr. Sanjay Jain at Washington University School of Medicine.

--- Version 0.6.0 updates --
+-- Version 0.6.1 updates --

-1.
+1. New feature for the machine learning segmenter: a RAM lock mode that always limits it to computing one chunk at a time, in both the interactive segmenter and the gross-segmenter. Feature map calculation within each chunk is parallelized to compensate, which should let it manage RAM more predictably without really sacrificing performance, and should prevent the segmenter from majorly leaking memory on large arrays.
+2. New function, 'Image' -> 'Select Objects': arbitrarily selects/deselects user-supplied lists of nodes or edges, for cases where there are objects of interest that can't conveniently be found by clicking. Allows imports from spreadsheets in case the user preorganizes a set of objects to select/deselect.
+3. Brightness/Contrast now shades out of 65,535 instead of 255, which should allow better brightening options for images above 8-bit depth.
+4. Select all function updated to use the mini highlight overlay in larger images. Also reports the number of nodes/edges in the array in the cmd window when used.
+5. Deleted the now-unused 'hub_getter.py' script from the package.
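Update 1 above describes the mechanism: chunks are processed strictly one at a time, while the feature maps inside each chunk are computed in parallel. As a rough illustration only (this is not NetTracer3D's actual code: segment_ram_locked, compute_feature_maps, the Gaussian features, the thresholding, and the chunk depth are all invented for the sketch), a RAM-locked loop can look like this:

import numpy as np
from concurrent.futures import ThreadPoolExecutor
from scipy import ndimage

def compute_feature_maps(chunk, sigmas=(1, 2, 4)):
    # Parallelize the per-chunk feature maps; gaussian_filter releases
    # the GIL on large arrays, so threads give real speedup here.
    chunk = chunk.astype(np.float32)
    with ThreadPoolExecutor() as pool:
        return list(pool.map(lambda s: ndimage.gaussian_filter(chunk, s), sigmas))

def segment_ram_locked(volume, chunk_depth=64):
    # "RAM lock": only one chunk's feature maps are ever resident at once.
    out = np.zeros(volume.shape, dtype=np.uint8)
    for z in range(0, volume.shape[0], chunk_depth):
        chunk = volume[z:z + chunk_depth]
        features = compute_feature_maps(chunk)  # parallel within the chunk
        # Placeholder classifier: threshold the mean feature response.
        out[z:z + chunk_depth] = (np.mean(features, axis=0) > chunk.mean()).astype(np.uint8)
        del features  # free this chunk's features before moving on
    return out

Serializing the chunks bounds peak memory to a single chunk's feature maps; the per-chunk thread pool claws back the parallelism lost by not processing chunks concurrently.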
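Update 2's select/deselect behavior reduces, conceptually, to masking a labeled array against a list of IDs. A minimal sketch under assumed names (select_objects and the spreadsheet path are hypothetical, not the GUI's API):

import numpy as np
import pandas as pd

def select_objects(label_array, ids, deselect=False):
    # Keep (or, with deselect=True, drop) the listed node/edge labels;
    # zero out everything else.
    mask = np.isin(label_array, ids)
    if deselect:
        mask = ~mask
    return np.where(mask, label_array, 0)

# IDs can be preorganized in a spreadsheet, one label per row of the first column.
ids = pd.read_excel("objects_to_select.xlsx", header=None)[0].tolist()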
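Update 3 means the brightness/contrast window is now mapped against a 16-bit ceiling rather than an 8-bit one. A sketch of the idea (rescale_display is a hypothetical helper, not the actual GUI code):

import numpy as np

def rescale_display(img, low, high):
    # Map the [low, high] window onto 0-65535 so images above 8-bit
    # depth keep their dynamic range instead of being clipped at 255.
    span = max(high - low, 1)
    scaled = (img.astype(np.float32) - low) / span * 65535.0
    return np.clip(scaled, 0, 65535).astype(np.uint16)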
@@ -1,21 +1,20 @@
 nettracer3d/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 nettracer3d/community_extractor.py,sha256=Zq8ZM595CTzeR6zLEZ4I6KvhkNfCPUReWvAKxTlaVfk,33495
-nettracer3d/hub_getter.py,sha256=KiNtxdajLkwB1ftslvrh1FE1Ch9ZCFEmHSEEotwR-To,8298
 nettracer3d/modularity.py,sha256=V1f3s_vGd8EuVz27mzq6ycIGr0BWIpH7c7NU4QjgAHU,30247
 nettracer3d/morphology.py,sha256=yQ0GuieMVXOQpaohZlPnkEXEuCUjf8Fg352axyK8nbM,10755
-nettracer3d/nettracer.py,sha256=
-nettracer3d/nettracer_gui.py,sha256=
+nettracer3d/nettracer.py,sha256=AS3r7Wg3MWv-FARApRCrzyuzb0l0_h944EfXFI75lng,207912
+nettracer3d/nettracer_gui.py,sha256=MSV0nod7qP_OP6PThRXdi4NB_dZFEo2-QZUqBBvpVmw,377440
 nettracer3d/network_analysis.py,sha256=MJBBjslA1k_R8ymid77U-qGSgzxFVfzGVQhE0IdhnbE,48046
 nettracer3d/network_draw.py,sha256=F7fw6Pcf4qWOhdKwLmhwqWdschbDlHzwCVolQC9imeU,14117
 nettracer3d/node_draw.py,sha256=k3sCTfUCJs3aH1C1q1gTNxDz9EAQbBd1hsUIJajxRx8,9823
 nettracer3d/proximity.py,sha256=FnIiI_AzfXd22HwCIFIyQRZxKYJ8YscIDdPnIv-wsO4,10560
 nettracer3d/run.py,sha256=xYeaAc8FCx8MuzTGyL3NR3mK7WZzffAYAH23bNRZYO4,127
-nettracer3d/segmenter.py,sha256=
+nettracer3d/segmenter.py,sha256=YE4S2DJwLrvYpSzFkTKZLCyh14JKZmgDfGbB4_5EaU0,80163
 nettracer3d/simple_network.py,sha256=fP1gkDdtQcHruEZpUdasKdZeVacoLOxKhR3bY0L1CAQ,15426
 nettracer3d/smart_dilate.py,sha256=Kekm6YIVlJniMvJMG6_AwwNmCqK2l4Qtvg9VzzqPKMw,24600
-nettracer3d-0.6.
-nettracer3d-0.6.
-nettracer3d-0.6.
-nettracer3d-0.6.
-nettracer3d-0.6.
-nettracer3d-0.6.
+nettracer3d-0.6.1.dist-info/LICENSE,sha256=gM207DhJjWrxLuEWXl0Qz5ISbtWDmADfjHp3yC2XISs,888
+nettracer3d-0.6.1.dist-info/METADATA,sha256=XNg9JY1EOoiZ8g5hqcfxe4kjy4_amS8JWWsAUZyK8do,4111
+nettracer3d-0.6.1.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+nettracer3d-0.6.1.dist-info/entry_points.txt,sha256=Nx1rr_0QhJXDBHAQg2vcqCzLMKBzSHfwy3xwGkueVyc,53
+nettracer3d-0.6.1.dist-info/top_level.txt,sha256=zsYy9rZwirfCEOubolhee4TyzqBAL5gSUeFMzhFTX8c,12
+nettracer3d-0.6.1.dist-info/RECORD,,
nettracer3d/hub_getter.py
DELETED
@@ -1,248 +0,0 @@
import networkx as nx
import pandas as pd
import numpy as np
import tifffile
from scipy import ndimage
from scipy.ndimage import zoom  # note: zoom() is used below but this import was missing in the original file
from . import network_analysis
from . import node_draw


def upsample_with_padding(data, factor, original_shape):
    # Upsample the input binary array while adding padding to match the original shape

    # Get the dimensions of the original and upsampled arrays
    original_shape = np.array(original_shape)
    binary_array = zoom(data, factor, order=0)
    upsampled_shape = np.array(binary_array.shape)

    # Calculate the positive differences in dimensions
    difference_dims = original_shape - upsampled_shape

    # Calculate the padding amounts for each dimension
    padding_dims = np.maximum(difference_dims, 0)
    padding_before = padding_dims // 2
    padding_after = padding_dims - padding_before

    # Pad the binary array along each dimension
    padded_array = np.pad(binary_array, [(padding_before[0], padding_after[0]),
                                         (padding_before[1], padding_after[1]),
                                         (padding_before[2], padding_after[2])],
                          mode='constant', constant_values=0)

    # Calculate the subtraction amounts for each dimension
    sub_dims = np.maximum(-difference_dims, 0)
    sub_before = sub_dims // 2
    sub_after = sub_dims - sub_before

    # Remove planes from the beginning and end
    if sub_dims[0] == 0:
        trimmed_planes = padded_array
    else:
        trimmed_planes = padded_array[sub_before[0]:-sub_after[0], :, :]

    # Remove rows from the beginning and end
    if sub_dims[1] == 0:
        trimmed_rows = trimmed_planes
    else:
        trimmed_rows = trimmed_planes[:, sub_before[1]:-sub_after[1], :]

    # Remove columns from the beginning and end
    if sub_dims[2] == 0:
        trimmed_array = trimmed_rows
    else:
        trimmed_array = trimmed_rows[:, :, sub_before[2]:-sub_after[2]]

    return trimmed_array


def weighted_network(excel_file_path):
    """Creates a network where the edges have weights proportional to the number of connections they make between the same structure."""
    # Read the Excel file into paired node lists
    master_list = read_excel_to_lists(excel_file_path)

    # Create a graph
    G = nx.Graph()

    # Create a dictionary to store edge weights based on node pairs
    edge_weights = {}

    nodes_a = master_list[0]
    nodes_b = master_list[1]

    # Iterate over the node pairs and update edge weights
    for i in range(len(nodes_a)):
        node1, node2 = nodes_a[i], nodes_b[i]
        edge = (node1, node2) if node1 < node2 else (node2, node1)  # Ensure consistent order
        edge_weights[edge] = edge_weights.get(edge, 0) + 1

    # Add edges to the graph with weights
    for edge, weight in edge_weights.items():
        G.add_edge(edge[0], edge[1], weight=weight)

    return G, edge_weights


def read_excel_to_lists(file_path, sheet_name=0):
    """Convert a pd dataframe to lists."""
    # Read the Excel file into a DataFrame without headers
    df = pd.read_excel(file_path, header=None, sheet_name=sheet_name)

    df = df.drop(0)  # Drop the header row

    # Initialize an empty list to store the lists of values
    data_lists = []

    # Iterate over each column in the DataFrame
    for column_name, column_data in df.items():
        # Convert the column values to a list and append to data_lists
        data_lists.append(column_data.tolist())

    master_list = [[], [], []]

    # Columns come in groups of three (node A, node B, optional third column)
    for i in range(0, len(data_lists), 3):
        master_list[0].extend(data_lists[i])
        master_list[1].extend(data_lists[i + 1])

        try:
            master_list[2].extend(data_lists[i + 2])
        except IndexError:
            pass

    return master_list


def labels_to_boolean(label_array, labels_list):
    # Use np.isin to create a boolean array with a single operation
    boolean_array = np.isin(label_array, labels_list)

    return boolean_array


def get_hubs(nodepath, network, proportion=None, directory=None, centroids=None, gen_more_images=False):

    if type(nodepath) == str:
        nodepath = tifffile.imread(nodepath)

    # Label the array if it does not already contain distinct labels
    if len(np.unique(nodepath)) < 3:
        structure_3d = np.ones((3, 3, 3), dtype=int)
        nodepath, num_nodes = ndimage.label(nodepath, structure=structure_3d)

    if type(network) == str:
        G, weights = weighted_network(network)
    else:
        G = network

    if proportion is None:
        proportion = 0.10
        print("Isolating top 0.10 high degree nodes by default. Specify 'proportion = 0.x' for custom node isolation.")
    else:
        print(f"Isolating top {proportion} high degree nodes")

    node_list = list(G.nodes)
    node_dict = {}

    for node in node_list:
        node_dict[node] = G.degree(node)

    # Calculate the number of top proportion% entries
    num_items = len(node_dict)
    num_top_10_percent = max(1, int(num_items * proportion))  # Ensure at least one item

    # Sort the dictionary by values in descending order and get the top proportion%
    sorted_items = sorted(node_dict.items(), key=lambda item: item[1], reverse=True)
    top_10_percent_items = sorted_items[:num_top_10_percent]

    # Extract the keys from the top proportion% items
    top_10_percent_keys = [key for key, value in top_10_percent_items]

    masks = labels_to_boolean(nodepath, top_10_percent_keys)

    masks = masks * nodepath  # Makes it save with labels

    if directory is None:
        tifffile.imwrite("isolated_hubs.tif", masks)
        print("Isolated hubs saved to isolated_hubs.tif")
    else:
        tifffile.imwrite(f"{directory}/isolated_hubs.tif", masks)
        print(f"Isolated hubs saved to {directory}/isolated_hubs.tif")

    if centroids is None:
        # for/else: the else branch runs only if no dimension is < 5
        for item in nodepath.shape:
            if item < 5:
                down_factor = 1
                break
        else:
            down_factor = 5

        centroids = network_analysis._find_centroids(masks, top_10_percent_keys, down_factor=down_factor)

    degree_dict = {}

    for node in top_10_percent_keys:
        degree_dict[node] = G.degree(node)

    labels = node_draw.degree_draw(degree_dict, centroids, masks)

    if directory is None:
        tifffile.imwrite("hub_degree_labels.tif", labels)
        print("Node hub labels saved to hub_degree_labels.tif")
    else:
        tifffile.imwrite(f"{directory}/hub_degree_labels.tif", labels)
        print(f"Node hub labels saved to {directory}/hub_degree_labels.tif")

    masks = node_draw.degree_infect(degree_dict, masks)

    if directory is None:
        tifffile.imwrite("hub_degree_labels_grayscale.tif", masks)
        print("Node hub grayscale labels saved to hub_degree_labels_grayscale.tif")
    else:
        tifffile.imwrite(f"{directory}/hub_degree_labels_grayscale.tif", masks)
        print(f"Node hub grayscale labels saved to {directory}/hub_degree_labels_grayscale.tif")

    return top_10_percent_keys


if __name__ == "__main__":

    masks = input("Labelled nodes?: ")
    outer_net = input('outer edges?: ')

    masks = tifffile.imread(masks)
    outer_G, weights = weighted_network(outer_net)

    node_list = list(outer_G.nodes)
    node_dict = {}

    for node in node_list:
        node_dict[node] = outer_G.degree(node)

    # Calculate the number of top 10% entries
    num_items = len(node_dict)
    num_top_10_percent = max(1, int(num_items * 0.10))  # Ensure at least one item

    # Sort the dictionary by values in descending order and get the top 10%
    sorted_items = sorted(node_dict.items(), key=lambda item: item[1], reverse=True)
    top_10_percent_items = sorted_items[:num_top_10_percent]

    # Extract the keys from the top 10% items
    top_10_percent_keys = [key for key, value in top_10_percent_items]

    mask2 = labels_to_boolean(masks, top_10_percent_keys)

    # Convert boolean values to 0 and 255
    mask2 = mask2.astype(np.uint8) * 255

    tifffile.imwrite("isolated_vertices.tif", mask2)
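For reference, the removed module could be driven from the 0.6.0 wheel roughly as follows (a usage sketch only; the file paths are placeholders, and the import no longer works as of 0.6.1):

from nettracer3d import hub_getter

# Isolate the top 5% highest-degree nodes from a labeled node image and
# a network spreadsheet, writing the hub images into 'out/'.
hub_ids = hub_getter.get_hubs("labeled_nodes.tif", "network.xlsx", proportion=0.05, directory="out")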
nettracer3d-0.6.1.dist-info/LICENSE: file without changes
nettracer3d-0.6.1.dist-info/WHEEL: file without changes
nettracer3d-0.6.1.dist-info/entry_points.txt: file without changes
nettracer3d-0.6.1.dist-info/top_level.txt: file without changes