nettracer3d-0.2.5-py3-none-any.whl → nettracer3d-0.2.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -36,6 +36,52 @@ def compute_centroid(binary_stack, label):
 
     return centroid
 
+def create_bar_graph(data_dict, title, x_label, y_label, directory=None):
+    """
+    Create a bar graph from a dictionary where keys are bar names and values are heights.
+
+    Parameters:
+    data_dict (dict): Dictionary with bar names as keys and heights as values
+    title (str): Title of the graph
+    x_label (str): Label for x-axis
+    y_label (str): Label for y-axis
+    directory (str, optional): Directory path to save the plot. If None, plot is not saved
+    """
+    import matplotlib.pyplot as plt
+
+    # Create figure and axis
+    plt.figure(figsize=(10, 6))
+
+    # Create bars
+    plt.bar(list(data_dict.keys()), list(data_dict.values()))
+
+    # Add labels and title
+    plt.title(title)
+    plt.xlabel(x_label)
+    plt.ylabel(y_label)
+
+    # Rotate x-axis labels if there are many bars
+    plt.xticks(rotation=45, ha='right')
+
+    # Adjust layout to prevent label cutoff
+    plt.tight_layout()
+
+    try:
+
+        # Save plot if directory is specified
+        if directory:
+            plt.savefig(f"{directory}/bar_graph.png")
+
+    except:
+        pass
+
+    try:
+
+        # Display the plot
+        plt.show()
+    except:
+        pass
+
 def open_network(excel_file_path):
     """opens an unweighted network from the network excel file"""
 
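For orientation, a minimal usage sketch of the new create_bar_graph helper added in the hunk above. Only the signature and behavior come from the diff; the importing module (assumed here to be nettracer3d.network_analysis, since this hunk does not name its file) and the example data are hypothetical:

    from nettracer3d import network_analysis  # assumed home of the helper shown above

    community_sizes = {'Community A': 14, 'Community B': 9, 'Community C': 21}  # hypothetical data
    network_analysis.create_bar_graph(
        community_sizes,
        title='Nodes per community',
        x_label='Community',
        y_label='Node count',
        directory=None,  # pass a folder path to also write bar_graph.png there
    )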
@@ -836,7 +882,10 @@ def radial_analysis(nodes, network, rad_dist, xy_scale = None, z_scale = None, c
     dist_list = get_distance_list(centroids, network, xy_scale, z_scale)
     x_vals, y_vals = buckets(dist_list, num_objects, rad_dist, directory = directory)
     histogram(x_vals, y_vals, directory = directory)
-    return dist_list
+    output = {}
+    for i in range(len(x_vals)):
+        output[y_vals[i]] = x_vals[i]
+    return output
 
 def buckets(dists, num_objects, rad_dist, directory = None):
     y_vals = []
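The change above reshapes the return value of radial_analysis: instead of the raw distance list, it now returns a dictionary built as output[y_vals[i]] = x_vals[i]. Going by the column names used in buckets below, that maps each radial distance bin to the average number of neighboring nodes at that distance. A toy illustration with hypothetical numbers (not package code):

    # Hypothetical buckets() outputs
    x_vals = [1.8, 2.4, 3.1]     # average number of neighboring nodes per bin
    y_vals = [25.0, 50.0, 75.0]  # radial distance bins
    output = {y_vals[i]: x_vals[i] for i in range(len(x_vals))}
    # {25.0: 1.8, 50.0: 2.4, 75.0: 3.1}  ->  radial distance -> average neighbor count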
@@ -859,13 +908,17 @@ def buckets(dists, num_objects, rad_dist, directory = None):
     data = {'Radial Distance From Any Node': y_vals, 'Average Number of Neighboring Nodes': x_vals}
     df = pd.DataFrame(data)
 
-    if directory is None:
-        # Save the DataFrame to an Excel file
-        df.to_excel('radial_distribution.xlsx', index=False)
-        print("Radial distribution saved to radial_distribution.xlsx")
-    else:
-        df.to_excel(f'{directory}/radial_distribution.xlsx', index=False)
-        print(f"Radial distribution saved to {directory}/radial_distribution.xlsx")
+    try:
+
+        if directory is None:
+            # Save the DataFrame to an Excel file
+            df.to_excel('radial_distribution.xlsx', index=False)
+            print("Radial distribution saved to radial_distribution.xlsx")
+        else:
+            df.to_excel(f'{directory}/radial_distribution.xlsx', index=False)
+            print(f"Radial distribution saved to {directory}/radial_distribution.xlsx")
+    except:
+        pass
 
     return x_vals, y_vals
 
@@ -881,8 +934,14 @@ def histogram(counts, y_vals, directory = None):
     plt.xlabel('Distance from any node')
     plt.ylabel('Avg Number of Neigbhoring Vertices')
 
-    if directory is not None:
-        plt.savefig(f'{directory}/radial_plot.png')
+    try:
+
+        if directory is not None:
+            plt.savefig(f'{directory}/radial_plot.png')
+        else:
+            plt.savefig('radial_plot.png')
+    except:
+        pass
 
     # Show the plot
     plt.show()
@@ -1090,8 +1149,8 @@ def edge_to_node(network, node_identities = None):
     alledges = set(edgesC)
 
     for i in range(len(edgesC)):
-        newpair1 = [nodesA[i], edgesC[i], None]
-        newpair2 = [edgesC[i], nodesB[i], None]
+        newpair1 = [nodesA[i], edgesC[i], 0]
+        newpair2 = [edgesC[i], nodesB[i], 0]
         new_network.append(newpair1)
         new_network.append(newpair2)
 
@@ -1140,7 +1199,7 @@ def rand_net_weighted(num_rows, num_nodes, nodes):
         while random_partner == nodes[i]:
             random_partner = random.randint(0, len(nodes)-1)
         random_partner = nodes[random_partner]
-        random_pair = [nodes[i], random_partner, None]
+        random_pair = [nodes[i], random_partner, 0]
         random_network.append(random_pair)
         k+= 1
         if k == num_rows:
@@ -1152,7 +1211,7 @@ def rand_net_weighted(num_rows, num_nodes, nodes):
 
     G, edge_weights = weighted_network(df)
 
-    return G
+    return G, df
 
 def rand_net(num_rows, num_nodes, nodes):
     random_network = []
@@ -1167,10 +1226,10 @@ def rand_net(num_rows, num_nodes, nodes):
 
             # Check if the random partner is different from the current node
             # and if the pair is not already in the network
-            if random_partner != nodes[i] and [nodes[i], random_partner, None] not in random_network and [random_partner, nodes[i], None] not in random_network:
+            if random_partner != nodes[i] and [nodes[i], random_partner, 0] not in random_network and [random_partner, nodes[i], 0] not in random_network:
                 break
 
-        random_pair = [nodes[i], random_partner, None]
+        random_pair = [nodes[i], random_partner, 0]
         random_network.append(random_pair)
         k += 1
 
@@ -1187,7 +1246,7 @@ def rand_net(num_rows, num_nodes, nodes):
 
     G, edge_weights = weighted_network(df)
 
-    return G
+    return G, df
 
 def generate_random(G, net_lists, weighted = True):
 
@@ -1250,13 +1309,17 @@ def degree_distribution(G, directory = None):
         'Proportion of nodes with degree (p(k))': proportion_list
     })
 
-    if directory is None:
-        # Save the DataFrame to an Excel file
-        df.to_excel('degree_dist.xlsx', index=False)
-        print("Degree distribution saved to degree_dist.xlsx")
-    else:
-        df.to_excel(f'{directory}/degree_dist.xlsx', index=False)
-        print(f"Degree distribution saved to {directory}/degree_dist.xlsx")
+    try:
+
+        if directory is None:
+            # Save the DataFrame to an Excel file
+            df.to_excel('degree_dist.xlsx', index=False)
+            print("Degree distribution saved to degree_dist.xlsx")
+        else:
+            df.to_excel(f'{directory}/degree_dist.xlsx', index=False)
+            print(f"Degree distribution saved to {directory}/degree_dist.xlsx")
+    except:
+        pass
 
 
     power_trendline(degrees, proportion_list, directory = directory)
@@ -1310,8 +1373,15 @@ def power_trendline(x, y, directory = None):
         verticalalignment='top'
     )
 
-    if directory is not None:
-        plt.savefig(f'{directory}/degree_plot.png')
+    try:
+
+        if directory is not None:
+            plt.savefig(f'{directory}/degree_plot.png')
+        else:
+            plt.savefig('degree_plot.png')
+    except:
+        pass
+
 
     plt.show()
 
nettracer3d/node_draw.py CHANGED
@@ -139,7 +139,9 @@ def degree_draw(degree_dict, centroid_dict, nodes):
     draw_array = np.zeros_like(nodes, dtype=np.uint8)
     #font_size = 24
 
-    for node, degree in degree_dict.items():
+    for node in centroid_dict:
+
+        degree = degree_dict[node]
         z, y, x = centroid_dict[node].astype(int)
 
         try:
nettracer3d/proximity.py CHANGED
@@ -6,6 +6,7 @@ from scipy.spatial import KDTree
 import concurrent.futures
 import multiprocessing as mp
 import pandas as pd
+from typing import Dict, Union, Tuple, List, Optional
 
 
 # Related to morphological border searching:
@@ -74,7 +75,7 @@ def _get_node_node_dict(label_array, label, dilate_xy, dilate_z):
 
     # Create a boolean mask where elements with the specified label are True
     binary_array = label_array == label
-    binary_array = nettracer.dilate_3D(binary_array, dilate_xy, dilate_xy, dilate_z) #Dilate the label to see where the dilated label overlaps
+    binary_array = nettracer.dilate_3D_recursive(binary_array, dilate_xy, dilate_xy, dilate_z) #Dilate the label to see where the dilated label overlaps
     label_array = label_array * binary_array # Filter the labels by the node in question
     label_array = label_array.flatten() # Convert 3d array to 1d array
     label_array = nettracer.remove_zeros(label_array) # Remove zeros
@@ -95,22 +96,26 @@ def process_label(args):
     return label, sub_nodes
 
 
-def create_node_dictionary(nodes, num_nodes, dilate_xy, dilate_z):
+def create_node_dictionary(nodes, num_nodes, dilate_xy, dilate_z, targets = None):
     """Internal method used for the secondary algorithm to process nodes in parallel."""
     # Initialize the dictionary to be returned
     node_dict = {}
 
     array_shape = nodes.shape
 
+
     # Use ThreadPoolExecutor for parallel execution
     with ThreadPoolExecutor(max_workers=mp.cpu_count()) as executor:
         # First parallel section to process labels
         # List of arguments for each parallel task
         args_list = [(nodes, i, dilate_xy, dilate_z, array_shape) for i in range(1, num_nodes + 1)]
 
-        # Execute parallel tasks to process labels
+        if targets is not None:
+            args_list = [tup for tup in args_list if tup[1] in targets]
+
         results = executor.map(process_label, args_list)
 
+
         # Second parallel section to create dictionary entries
         for label, sub_nodes in results:
             executor.submit(create_dict_entry, node_dict, label, sub_nodes, dilate_xy, dilate_z)
@@ -182,15 +187,19 @@ def find_neighbors_kdtree(array, radius, targets=None):
     if targets is None:
         # Original behavior: find neighbors for all points
         query_points = points
+        query_indices = range(len(points)) # Add this line
     else:
         # Find coordinates of target values
         target_points = []
+        target_indices = [] # Add this line
         for idx, point in enumerate(points):
             if array[tuple(point)] in targets:
                 target_points.append(point)
+                target_indices.append(idx) # Add this line
 
         # Convert to numpy array for querying
         query_points = np.array(target_points)
+        query_indices = target_indices # Add this line
 
     # Handle case where no target values were found
     if len(query_points) == 0:
@@ -203,14 +212,14 @@ def find_neighbors_kdtree(array, radius, targets=None):
     output = []
 
     # Generate pairs
-    for query_idx, neighbors in enumerate(neighbor_indices):
+    for i, neighbors in enumerate(neighbor_indices):
+        query_idx = query_indices[i] # Modified this line
         for neighbor_idx in neighbors:
             # Skip self-pairing
             if neighbor_idx != query_idx:
                 query_value = array[tuple(points[query_idx])]
                 neighbor_value = array[tuple(points[neighbor_idx])]
                 output.append([query_value, neighbor_value, 0])
-
 
     return output
 
@@ -225,4 +234,55 @@ def extract_pairwise_connections(connections):
             output.append([list_index_value, number, 0])
             print(f'sublist: {sublist}, adding: {[list_index_value, number, 0]}')
 
-    return output
+    return output
+
+
+
+#voronois:
+def create_voronoi_3d_kdtree(centroids: Dict[Union[int, str], Union[Tuple[int, int, int], List[int]]],
+                             shape: Optional[Tuple[int, int, int]] = None) -> np.ndarray:
+    """
+    Create a 3D Voronoi diagram using scipy's KDTree for faster computation.
+
+    Args:
+        centroids: Dictionary with labels as keys and (z,y,x) coordinates as values
+        shape: Optional tuple of (Z,Y,X) dimensions. If None, calculated from centroids
+
+    Returns:
+        3D numpy array where each cell contains the label of the closest centroid as uint32
+    """
+    from scipy.spatial import cKDTree
+
+    # Convert string labels to integers if necessary
+    if any(isinstance(k, str) for k in centroids.keys()):
+        label_map = {label: idx for idx, label in enumerate(centroids.keys())}
+        centroids = {label_map[k]: v for k, v in centroids.items()}
+
+    # Convert centroids to array and keep track of labels
+    labels = np.array(list(centroids.keys()), dtype=np.uint32)
+    centroid_points = np.array([centroids[label] for label in labels])
+
+    # Calculate shape if not provided
+    if shape is None:
+        max_coords = centroid_points.max(axis=0)
+        shape = tuple(max_coord + 1 for max_coord in max_coords)
+
+    # Create KD-tree
+    tree = cKDTree(centroid_points)
+
+    # Create coordinate arrays
+    coords = np.array(np.meshgrid(
+        np.arange(shape[0]),
+        np.arange(shape[1]),
+        np.arange(shape[2]),
+        indexing='ij'
+    )).reshape(3, -1).T
+
+    # Find nearest centroid for each point
+    _, indices = tree.query(coords)
+
+    # Convert indices to labels and ensure uint32 dtype
+    label_array = labels[indices].astype(np.uint32)
+
+    # Reshape to final shape
+    return label_array.reshape(shape)
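A minimal usage sketch of the new create_voronoi_3d_kdtree function (the centroid coordinates and volume shape below are hypothetical; the signature, labels-as-keys convention, and uint32 return come from the hunk above):

    from nettracer3d import proximity

    # Hypothetical centroids: label -> (z, y, x)
    centroids = {1: (2, 10, 10), 2: (5, 40, 8), 3: (9, 25, 30)}

    # Let the function infer the volume shape from the centroids...
    voronoi = proximity.create_voronoi_3d_kdtree(centroids)

    # ...or pass an explicit (Z, Y, X) shape
    voronoi = proximity.create_voronoi_3d_kdtree(centroids, shape=(12, 64, 64))
    print(voronoi.shape, voronoi.dtype)  # (12, 64, 64) uint32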
@@ -237,21 +237,6 @@ def show_simple_network(excel_file_path, geometric = False, geo_info = None, dir
     # Add edges from the DataFrame
     G.add_edges_from(edges)
 
-    # Print basic information about the graph
-    num_nodes = G.number_of_nodes()
-    num_edge = G.number_of_edges()
-
-    print("Number of nodes:", num_nodes)
-    print("Number of edges:", num_edge)
-
-    # Calculate the average degree connectivity
-    average_degree_connectivity = nx.average_degree_connectivity(G)
-    print("Average degree connectivity:", average_degree_connectivity)
-
-    # Calculate the average number of edges attached to a node
-    average_edges_per_node = num_nodes/num_edge
-    print("Average edges per node:", average_edges_per_node)
-
     if geometric:
         for node in list(G.nodes()):
             if node not in geo_info[0]:
@@ -272,12 +257,10 @@ def show_simple_network(excel_file_path, geometric = False, geo_info = None, dir
     plt.show()
 
 
-def show_identity_network(excel_file_path, node_identities, geometric = False, geo_info = None, directory = None):
-
+def show_identity_network(excel_file_path, node_identities, geometric=False, geo_info=None, directory=None):
     if type(node_identities) == str:
         # Read the Excel file into a DataFrame
         df = pd.read_excel(node_identities)
-
         # Convert the DataFrame to a dictionary
         identity_dict = pd.Series(df.iloc[:, 1].values, index=df.iloc[:, 0]).to_dict()
     else:
@@ -289,74 +272,101 @@ def show_identity_network(excel_file_path, node_identities, geometric = False, g
         master_list = excel_file_path
 
     edges = zip(master_list[0], master_list[1])
-
+
     # Create a graph
     G = nx.Graph()
-
-    # Add edges from the lists
     G.add_edges_from(edges)
 
-    # Print basic information about the graph
-    num_nodes = G.number_of_nodes()
-    num_edge = G.number_of_edges()
-
-    print("Number of nodes:", num_nodes)
-    print("Number of edges:", num_edge)
-
-    # Calculate the average number of edges attached to a node
-    average_edges_per_node = num_edge / num_nodes
-    print("Average edges per node:", average_edges_per_node)
-
-    # Calculate the average degree connectivity
-    average_degree_connectivity = nx.average_degree_connectivity(G)
-    print("Average degree connectivity:", average_degree_connectivity)
-
-    # Create a color map based on the categories in identity_dict
+    # Create a more sophisticated color palette using a combination of colormap sequences
     unique_categories = list(set(identity_dict.values()))
-    color_map = {category: mcolors.to_hex(plt.cm.tab10(i / len(unique_categories))[:3]) for i, category in enumerate(unique_categories)}
-
-    node_dict = {}
-    edges_list = []
-    nodes_list = []
-
-    for node in list(G.nodes()):
-        if identity_dict[node] == 'Edge':
-            node_dict[node] = 30
-            edges_list.append(node)
-        else:
-            node_dict[node] = 100
-            nodes_list.append(node)
-
-    node_sizes_list = [node_dict[node] for node in G.nodes()]
-
-    # Visualize the graph with different node colors based on categories
+    num_categories = len(unique_categories)
+
+    # Create a color palette that combines multiple colormaps for more distinct colors
+    if num_categories <= 10:
+        colors = plt.cm.tab10(np.linspace(0, 1, num_categories))
+    elif num_categories <= 20:
+        colors1 = plt.cm.tab20(np.linspace(0, 1, min(num_categories, 20)))
+        colors = colors1[:num_categories]
+    else:
+        # For large number of categories, combine multiple distinct colormaps
+        colors1 = plt.cm.tab20(np.linspace(0, 1, 20))
+        colors2 = plt.cm.Set3(np.linspace(0, 1, 12))
+        colors3 = plt.cm.Pastel1(np.linspace(0, 1, 9))
+        colors4 = plt.cm.Paired(np.linspace(0, 1, 12))
+
+        # Combine and take needed number of colors
+        all_colors = np.vstack([colors1, colors2, colors3, colors4])
+        # Shuffle the colors to ensure adjacent categories have distinct colors
+        np.random.seed(42) # For consistency
+        np.random.shuffle(all_colors)
+        colors = all_colors[:num_categories]
+
+    color_map = {category: mcolors.to_hex(color[:3])
+                 for category, color in zip(unique_categories, colors)}
+
+    # Node size handling
+    node_dict = {node: 30 if identity_dict[node] == 'Edge' else 100
+                 for node in G.nodes()}
+
     if geometric:
+        # Handle geometric positioning
         for node in list(G.nodes()):
             if node not in geo_info[0]:
                 G.remove_node(node)
-                print(f"Removing node {node} from network visualization (no centroid - likely due to downsampling when finding centroids)")
-
-        pos, z_pos = geometric_positions(geo_info[0], geo_info[1])
+                print(f"Removing node {node} from network visualization "
+                      f"(no centroid - likely due to downsampling when finding centroids)")
+
+        pos, z_pos = geometric_positions(geo_info[0], geo_info[1])
         node_sizes_list = [z_pos[node] for node in G.nodes()]
-        # Assign a color to each node based on its category
-        node_colors = [color_map[identity_dict[node]] for node in G.nodes]
-        nx.draw(G, pos, with_labels=True, font_color='black', font_weight='bold', node_size= node_sizes_list, node_color=node_colors, alpha=0.8, font_size = 12)
     else:
-        # Assign a color to each node based on its category
-        node_colors = [color_map[identity_dict[node]] for node in G.nodes]
         pos = nx.spring_layout(G)
-        nx.draw(G, pos, with_labels=True, font_color='black', font_weight='bold', node_size= 100, node_color=node_colors, alpha=0.8, font_size = 12)
-
-    # Create custom legend handles
-    legend_handles = [Patch(color=color, label=category) for category, color in color_map.items()]
-
-    # Add legend to the plot
-    plt.legend(handles=legend_handles, loc='upper right', title='Categories')
+        node_sizes_list = [node_dict[node] for node in G.nodes()]
 
+    # Create figure with custom size
+    plt.figure(figsize=(12, 8))
+
+    # Create separate axes for the graph and legend
+    graph_ax = plt.gca()
+
+    # Draw the network
+    node_colors = [color_map[identity_dict[node]] for node in G.nodes()]
+    nx.draw(G, pos, ax=graph_ax, with_labels=True, font_color='black',
+            font_weight='bold', node_size=node_sizes_list,
+            node_color=node_colors, alpha=0.8, font_size=12)
+
+    # Create custom legend with multiple columns if needed
+    legend_handles = [Patch(color=color, label=category)
+                      for category, color in color_map.items()]
+
+    # Adjust number of columns based on number of categories
+    if len(unique_categories) > 20:
+        ncol = 3
+        bbox_to_anchor = (1.2, 1)
+    elif len(unique_categories) > 10:
+        ncol = 2
+        bbox_to_anchor = (1.1, 1)
+    else:
+        ncol = 1
+        bbox_to_anchor = (1.05, 1)
+
+    # Add legend with adjusted parameters
+    legend = plt.legend(handles=legend_handles,
+                        bbox_to_anchor=bbox_to_anchor,
+                        loc='upper left',
+                        title='Categories',
+                        ncol=ncol,
+                        fontsize='small',
+                        title_fontsize='medium')
+
+    # Adjust layout to prevent legend overlap
+    plt.tight_layout()
+
+    # Save if directory provided
     if directory is not None:
-        plt.savefig(f'{directory}/identity_network_plot.png')
-
-    # Display the graph
+        plt.savefig(f'{directory}/identity_network_plot.png',
+                    bbox_inches='tight',
+                    dpi=300)
+
     plt.show()
 
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: nettracer3d
-Version: 0.2.5
+Version: 0.2.7
 Summary: Scripts for intializing and analyzing networks from segmentations of three dimensional images.
 Author-email: Liam McLaughlin <boom2449@gmail.com>
 Project-URL: User_Manual, https://drive.google.com/drive/folders/1fTkz3n4LN9_VxKRKC8lVQSlrz_wq0bVn?usp=drive_link
@@ -0,0 +1,18 @@
+nettracer3d/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nettracer3d/community_extractor.py,sha256=DaDKwb6UuqQWVS0kue4de_HtXxODTvTYRgwZiinEAlU,26632
+nettracer3d/hub_getter.py,sha256=KiNtxdajLkwB1ftslvrh1FE1Ch9ZCFEmHSEEotwR-To,8298
+nettracer3d/modularity.py,sha256=V1f3s_vGd8EuVz27mzq6ycIGr0BWIpH7c7NU4QjgAHU,30247
+nettracer3d/morphology.py,sha256=5cSoLVy0i6NNhHW6s1y4vxQZvY5afKmDbc-kG8Zvh4Q,13204
+nettracer3d/nettracer.py,sha256=xS0jv0Vlmr6pyGcgrEk9Hve5MKtGM5GMkTjgQlk-1gc,202853
+nettracer3d/nettracer_gui.py,sha256=9i69FuTvWfnA1KYk-wCocWpkvA8_3SJiwX28CgaPC24,272740
+nettracer3d/network_analysis.py,sha256=bT9luJ9uRbdw-KhVNeElLAI3MhXP4kuEpwcvCtslPR8,43849
+nettracer3d/network_draw.py,sha256=JWWEX7zT6Y9fcO75TtBwkGpPKFIkvBy8pfyB3YB-H_E,12599
+nettracer3d/node_draw.py,sha256=AL8KfFNYBybOx4q6y2pGsAD4QdMebnS-FGRVTqDa0tA,8234
+nettracer3d/proximity.py,sha256=KYs4QUbt1U79RLzTvt8BmrxeGVaeKOQ2brtzTjjA78c,11011
+nettracer3d/simple_network.py,sha256=fP1gkDdtQcHruEZpUdasKdZeVacoLOxKhR3bY0L1CAQ,15426
+nettracer3d/smart_dilate.py,sha256=howfO6Lw5PxNjkaOBSCjkmf7fyau_-_8iTct2mAuTAQ,22083
+nettracer3d-0.2.7.dist-info/LICENSE,sha256=gM207DhJjWrxLuEWXl0Qz5ISbtWDmADfjHp3yC2XISs,888
+nettracer3d-0.2.7.dist-info/METADATA,sha256=jZs7zfReDebDgdZSAG6_oDHLWR-jtBWaLL4rLobjNjk,2258
+nettracer3d-0.2.7.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+nettracer3d-0.2.7.dist-info/top_level.txt,sha256=zsYy9rZwirfCEOubolhee4TyzqBAL5gSUeFMzhFTX8c,12
+nettracer3d-0.2.7.dist-info/RECORD,,
@@ -1,18 +0,0 @@
-nettracer3d/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nettracer3d/community_extractor.py,sha256=nehyfOkhmMo5CfIEQUijrnNzbIZ04_PfUCATa4Uwusg,20982
-nettracer3d/hub_getter.py,sha256=L10HkXYAkAA2ZgvZe50z3AOmvBkcsTyRPcQCe4fjehk,8273
-nettracer3d/modularity.py,sha256=On1qI0gNv2UinVeQTYM3hthbBvhXSTxwlCrDMJxVmjA,21381
-nettracer3d/morphology.py,sha256=rgV8zkVks9t7qBqQTj1HCq22_CiQIzBSRqZmfRs3xCE,8593
-nettracer3d/nettracer.py,sha256=3TFLDjWLWh9aKZBXtn-YGVOm-ut0kyu_XyUzGEbnYfQ,184479
-nettracer3d/nettracer_gui.py,sha256=SzmqZDue8T2nv--UO225AgNxaFFTO24BO_jaPgm-pow,214471
-nettracer3d/network_analysis.py,sha256=uFEogY1IHVnDQ1iMmRPOy7LPpv3LVy5i4325KyNdVBY,42191
-nettracer3d/network_draw.py,sha256=JWWEX7zT6Y9fcO75TtBwkGpPKFIkvBy8pfyB3YB-H_E,12599
-nettracer3d/node_draw.py,sha256=4xZHBQQYxJvlAQUkRSTPyD2-Z8LriZ7_HqFpkb7fvz4,8210
-nettracer3d/proximity.py,sha256=e7IW2flA0sPrU6TVmTRqVFtV-QoZb4PPXLfGpRmK6Bw,8735
-nettracer3d/simple_network.py,sha256=0DwHYvaRcCbAxlTB8QbBkoOqpxr7hRs0H1O2ThoNU_4,15075
-nettracer3d/smart_dilate.py,sha256=howfO6Lw5PxNjkaOBSCjkmf7fyau_-_8iTct2mAuTAQ,22083
-nettracer3d-0.2.5.dist-info/LICENSE,sha256=gM207DhJjWrxLuEWXl0Qz5ISbtWDmADfjHp3yC2XISs,888
-nettracer3d-0.2.5.dist-info/METADATA,sha256=NVFX0u4rKFnfYY7BBjBjJMrQBHlRWcGbiF5-4SX4Lj0,2258
-nettracer3d-0.2.5.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-nettracer3d-0.2.5.dist-info/top_level.txt,sha256=zsYy9rZwirfCEOubolhee4TyzqBAL5gSUeFMzhFTX8c,12
-nettracer3d-0.2.5.dist-info/RECORD,,