nettracer3d-0.8.0-py3-none-any.whl → nettracer3d-0.8.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nettracer3d/proximity.py CHANGED
@@ -10,7 +10,8 @@ import pandas as pd
  import matplotlib.pyplot as plt
  from typing import Dict, Union, Tuple, List, Optional
  from collections import defaultdict
-
+ from multiprocessing import Pool, cpu_count
+ import functools
 
  # Related to morphological border searching:
 
@@ -200,7 +201,103 @@ def populate_array(centroids, clip=False):
      else:
          return array
 
- def find_neighbors_kdtree(radius, centroids=None, array=None, targets=None):
+ def _process_chunk_centroids(args):
+     """Process a chunk of neighbor indices for centroids mode"""
+     chunk_data, idx_to_node, query_indices, tree, points, max_neighbors = args
+     output = []
+
+     for i, neighbors in chunk_data:
+         query_idx = query_indices[i]
+         query_value = idx_to_node[query_idx]
+         query_point = points[query_idx]
+
+         # Filter out self-reference
+         filtered_neighbors = [n for n in neighbors if n != query_idx]
+
+         # If max_neighbors is specified and we have more neighbors than allowed
+         if max_neighbors is not None and len(filtered_neighbors) > max_neighbors:
+             # Use KDTree to get distances efficiently - query for more than we need
+             # to ensure we get the exact closest ones
+             k = min(len(filtered_neighbors), max_neighbors + 1) # +1 in case query point is included
+             distances, indices = tree.query(query_point, k=k)
+
+             # Filter out self and limit to max_neighbors
+             selected_neighbors = []
+             for dist, idx in zip(distances, indices):
+                 if idx != query_idx and idx in filtered_neighbors:
+                     selected_neighbors.append(idx)
+                     if len(selected_neighbors) >= max_neighbors:
+                         break
+
+             filtered_neighbors = selected_neighbors
+
+         # Add all selected neighbors to output
+         for neighbor_idx in filtered_neighbors:
+             neighbor_value = idx_to_node[neighbor_idx]
+             output.append([query_value, neighbor_value, 0])
+
+     return output
+
+ def _process_chunk_array(args):
+     """Process a chunk of neighbor indices for array mode"""
+     chunk_data, array, point_tuples, query_indices, tree, points, max_neighbors = args
+     output = []
+
+     for i, neighbors in chunk_data:
+         query_idx = query_indices[i]
+         query_value = array[point_tuples[query_idx]]
+         query_point = points[query_idx]
+
+         # Filter out self-reference
+         filtered_neighbors = [n for n in neighbors if n != query_idx]
+
+         # If max_neighbors is specified and we have more neighbors than allowed
+         if max_neighbors is not None and len(filtered_neighbors) > max_neighbors:
+             # Use KDTree to get distances efficiently - query for more than we need
+             # to ensure we get the exact closest ones
+             k = min(len(filtered_neighbors), max_neighbors + 1) # +1 in case query point is included
+             distances, indices = tree.query(query_point, k=k)
+
+             # Filter out self and limit to max_neighbors
+             selected_neighbors = []
+             for dist, idx in zip(distances, indices):
+                 if idx != query_idx and idx in filtered_neighbors:
+                     selected_neighbors.append(idx)
+                     if len(selected_neighbors) >= max_neighbors:
+                         break
+
+             filtered_neighbors = selected_neighbors
+
+         # Add all selected neighbors to output
+         for neighbor_idx in filtered_neighbors:
+             neighbor_value = array[point_tuples[neighbor_idx]]
+             output.append([query_value, neighbor_value, 0])
+
+     return output
+
+ def find_neighbors_kdtree(radius, centroids=None, array=None, targets=None, n_jobs=None, chunk_size=None, max_neighbors=None):
+     """
+     Find neighbors using KDTree with optional parallelization.
+
+     Parameters:
+     -----------
+     radius : float
+         Search radius for finding neighbors
+     centroids : dict or list, optional
+         Dictionary mapping node IDs to coordinates or list of points
+     array : numpy.ndarray, optional
+         Array to search for nonzero points
+     targets : list, optional
+         Specific targets to query for neighbors
+     n_jobs : int, optional
+         Number of parallel jobs. If None, uses cpu_count(). Set to 1 to disable parallelization.
+     chunk_size : int, optional
+         Size of chunks for parallel processing. If None, auto-calculated based on data size.
+     max_neighbors : int, optional
+         Maximum number of nearest neighbors to return per query point within the radius.
+         If None, returns all neighbors within radius (original behavior).
+     """
+
      # Get coordinates of nonzero points
      if centroids:
          # If centroids is a dictionary mapping node IDs to coordinates
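For illustration, a minimal usage sketch (not taken from the package) of the new find_neighbors_kdtree signature documented above. It assumes centroids are passed as a {node_id: (z, y, x)} dictionary as the docstring describes, and it relies only on behavior visible in the diff: pairs come back as [query_id, neighbor_id, 0] triples.

    # Hypothetical example data; coordinates are integer voxel indices.
    centroids = {1: (0, 0, 0), 2: (0, 0, 4), 3: (0, 3, 0), 4: (10, 10, 10)}

    pairs = find_neighbors_kdtree(
        radius=5,
        centroids=centroids,
        n_jobs=1,          # 1 disables the multiprocessing path
        max_neighbors=2,   # keep at most the 2 closest neighbors per node
    )
    # e.g. [1, 2, 0] and [1, 3, 0] appear because nodes 2 and 3 lie within
    # radius 5 of node 1; node 4 is too far away to pair with anything.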
@@ -214,72 +311,136 @@ def find_neighbors_kdtree(radius, centroids=None, array=None, targets=None):
              points = np.array(centroids, dtype=np.int32)
              node_ids = list(range(1, len(points) + 1)) # Default sequential IDs
 
-         # Create a temporary array only if we need it for lookups
-         if array is None:
-             max_coords = np.max(points, axis=0) + 1
-             array = np.zeros(tuple(max_coords), dtype=np.int32)
-             for i, point in enumerate(points):
-                 array[tuple(point)] = node_ids[i] # Use the actual node ID
+         # Create direct index-to-node mapping instead of sparse array
+         idx_to_node = {i: node_ids[i] for i in range(len(points))}
+
      elif array is not None:
          points = np.transpose(np.nonzero(array))
          node_ids = None # Not used in array-based mode
+         # Pre-convert points to tuples once to avoid repeated conversions
+         point_tuples = [tuple(point) for point in points]
      else:
          return []
 
+     print("Building KDTree...")
      # Create KD-tree from all nonzero points
      tree = KDTree(points)
 
      if targets is None:
          # Original behavior: find neighbors for all points
          query_points = points
-         query_indices = range(len(points))
+         query_indices = list(range(len(points)))
      else:
+         # Convert targets to set for O(1) lookup
+         targets_set = set(targets)
+
          # Find coordinates of target values
          target_points = []
          target_indices = []
 
          if array is not None:
              # Standard array-based filtering
-             for idx, point in enumerate(points):
-                 point_tuple = tuple(point)
-                 if array[point_tuple] in targets:
-                     target_points.append(point)
+             for idx, point_tuple in enumerate(point_tuples):
+                 if array[point_tuple] in targets_set:
+                     target_points.append(points[idx])
                      target_indices.append(idx)
          else:
              # Filter based on node IDs directly
              for idx, node_id in enumerate(node_ids):
-                 if node_id in targets:
+                 if node_id in targets_set:
                      target_points.append(points[idx])
                      target_indices.append(idx)
 
          # Convert to numpy array for querying
          query_points = np.array(target_points)
          query_indices = target_indices
+
 
      # Handle case where no target values were found
      if len(query_points) == 0:
          return []
 
+     print("Querying KDTree...")
+
      # Query for all points within radius of each query point
      neighbor_indices = tree.query_ball_point(query_points, radius)
 
-     # Initialize output list
-     output = []
+     print("Sorting Through Output...")
+
+     # Determine parallelization parameters
+     if n_jobs is None:
+         n_jobs = cpu_count()
 
-     # Generate pairs
-     for i, neighbors in enumerate(neighbor_indices):
-         query_idx = query_indices[i]
-         for neighbor_idx in neighbors:
-             # Skip self-pairing
-             if neighbor_idx != query_idx:
-                 # Use node IDs from the dictionary if available
-                 if array is not None:
-                     query_value = array[tuple(points[query_idx])]
-                     neighbor_value = array[tuple(points[neighbor_idx])]
-                 else:
-                     query_value = node_ids[query_idx]
-                     neighbor_value = node_ids[neighbor_idx]
-                 output.append([query_value, neighbor_value, 0])
+     # Skip parallelization for small datasets or when n_jobs=1
+     if n_jobs == 1 or len(neighbor_indices) < 100:
+         # Sequential processing (original logic with max_neighbors support)
+         output = []
+         for i, neighbors in enumerate(neighbor_indices):
+             query_idx = query_indices[i]
+             query_point = points[query_idx]
+
+             # Filter out self-reference
+             filtered_neighbors = [n for n in neighbors if n != query_idx]
+
+             # If max_neighbors is specified and we have more neighbors than allowed
+             if max_neighbors is not None and len(filtered_neighbors) > max_neighbors:
+                 # Use KDTree to get distances efficiently - query for more than we need
+                 # to ensure we get the exact closest ones
+                 k = min(len(filtered_neighbors), max_neighbors + 1) # +1 in case query point is included
+                 distances, indices = tree.query(query_point, k=k)
+
+                 # Filter out self and limit to max_neighbors
+                 selected_neighbors = []
+                 for dist, idx in zip(distances, indices):
+                     if idx != query_idx and idx in filtered_neighbors:
+                         selected_neighbors.append(idx)
+                         if len(selected_neighbors) >= max_neighbors:
+                             break
+
+                 filtered_neighbors = selected_neighbors
+
+             # Process the selected neighbors
+             if centroids:
+                 query_value = idx_to_node[query_idx]
+                 for neighbor_idx in filtered_neighbors:
+                     neighbor_value = idx_to_node[neighbor_idx]
+                     output.append([query_value, neighbor_value, 0])
+             else:
+                 query_value = array[point_tuples[query_idx]]
+                 for neighbor_idx in filtered_neighbors:
+                     neighbor_value = array[point_tuples[neighbor_idx]]
+                     output.append([query_value, neighbor_value, 0])
+         return output
+
+     # Parallel processing
+     if chunk_size is None:
+         # Auto-calculate chunk size: aim for ~4x more chunks than processes
+         chunk_size = max(1, len(neighbor_indices) // (n_jobs * 4))
+
+     # Create chunks of (index, neighbors) pairs
+     chunks = []
+     for i in range(0, len(neighbor_indices), chunk_size):
+         chunk = [(j, neighbor_indices[j]) for j in range(i, min(i + chunk_size, len(neighbor_indices)))]
+         chunks.append(chunk)
+
+     # Process chunks in parallel
+     with Pool(processes=n_jobs) as pool:
+         if centroids:
+             # Prepare arguments for centroids mode
+             chunk_args = [(chunk, idx_to_node, query_indices, tree, points, max_neighbors) for chunk in chunks]
+             chunk_results = pool.map(_process_chunk_centroids, chunk_args)
+         else:
+             # Prepare arguments for array mode
+             chunk_args = [(chunk, array, point_tuples, query_indices, tree, points, max_neighbors) for chunk in chunks]
+             chunk_results = pool.map(_process_chunk_array, chunk_args)
+
+     # Flatten results
+     output = []
+     for chunk_result in chunk_results:
+         output.extend(chunk_result)
+
+     print("Organizing Network...")
+
 
      return output
 
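The chunk-size heuristic in the parallel branch targets roughly four chunks per worker process so Pool.map can balance uneven hunks of work. A worked example of the arithmetic (illustrative only, with assumed numbers):

    n_jobs = 8                                      # worker processes
    n_queries = 10_000                              # len(neighbor_indices)
    chunk_size = max(1, n_queries // (n_jobs * 4))  # 10_000 // 32 = 312
    n_chunks = -(-n_queries // chunk_size)          # ceil(10_000 / 312) = 33
    # About 33 chunks spread across 8 workers, i.e. ~4 chunks per worker.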
@@ -296,6 +457,69 @@ def extract_pairwise_connections(connections):
      return output
 
 
+ def average_nearest_neighbor_distances(point_centroids, root_set, compare_set, xy_scale=1.0, z_scale=1.0, num=1):
+     """
+     Calculate the average distance between each point in root_set and its nearest neighbor(s) in compare_set.
+
+     Args:
+         point_centroids (dict): Dictionary mapping point IDs to [Z, Y, X] coordinates
+         root_set (set): Set of point IDs to find nearest neighbors for
+         compare_set (set): Set of point IDs to search for nearest neighbors in
+         xy_scale (float): Scaling factor for X and Y coordinates
+         z_scale (float): Scaling factor for Z coordinate
+         num (int): Number of nearest neighbors to average per root point
+
+     Returns:
+         tuple: (average nearest-neighbor distance, dict mapping each root ID to its mean distance)
+     """
+
+     # Extract and scale coordinates for compare_set
+     compare_coords = []
+     compare_ids = list(compare_set)
+
+     for point_id in compare_ids:
+         z, y, x = point_centroids[point_id]
+         compare_coords.append([z * z_scale, y * xy_scale, x * xy_scale])
+
+     compare_coords = np.array(compare_coords)
+
+     # Build KDTree for efficient nearest neighbor search
+     tree = KDTree(compare_coords)
+
+     distances = {}
+     same_sets = root_set == compare_set
+
+     for root_id in root_set:
+         # Get scaled coordinates for root point
+         z, y, x = point_centroids[root_id]
+         root_coord = np.array([z * z_scale, y * xy_scale, x * xy_scale])
+
+         if same_sets:
+             # When the sets are the same, query num + 1 nearest neighbors and skip
+             # the first one (which would be the point itself)
+             distances_to_all, indices = tree.query(root_coord, k=(num + 1))
+
+             temp_dist = 0
+             for i in range(1, len(distances_to_all)):
+                 temp_dist += distances_to_all[i]
+
+             distances[root_id] = temp_dist/(len(distances_to_all) - 1)
+
+         else:
+             # Different sets, find nearest neighbors
+             distances_to_all, _ = tree.query(root_coord, k=num)
+             temp_dist = 0
+             for val in distances_to_all:
+                 temp_dist += val
+
+             distances[root_id] = temp_dist/(len(distances_to_all))
+
+     avg = np.mean(list(distances.values())) if list(distances.values()) else 0.0
+
+
+     # Return the average distance and the per-point distances
+     return avg, distances
+
+
 
  #voronois:
  def create_voronoi_3d_kdtree(centroids: Dict[Union[int, str], Union[Tuple[int, int, int], List[int]]],
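A brief usage sketch (not from the package) for the new average_nearest_neighbor_distances helper, comparing a set of centroids against itself so that the self-match is skipped; the scale factors are assumed to convert voxel indices into physical units.

    # Hypothetical centroids keyed by node ID, as {id: [Z, Y, X]}.
    point_centroids = {1: [0, 0, 0], 2: [0, 0, 10], 3: [0, 10, 0], 4: [5, 5, 5]}
    ids = set(point_centroids)

    avg, per_node = average_nearest_neighbor_distances(
        point_centroids, ids, ids, xy_scale=0.5, z_scale=2.0
    )
    # per_node maps each ID to the mean distance to its `num` nearest
    # neighbors (excluding itself here); avg is the mean of those values.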
nettracer3d-0.8.2.dist-info/METADATA ADDED
@@ -0,0 +1,117 @@
+ Metadata-Version: 2.4
+ Name: nettracer3d
+ Version: 0.8.2
+ Summary: Scripts for initializing and analyzing networks from segmentations of three dimensional images.
+ Author-email: Liam McLaughlin <liamm@wustl.edu>
+ Project-URL: Documentation, https://nettracer3d.readthedocs.io/en/latest/
+ Project-URL: Video_Tutorial, https://www.youtube.com/watch?v=cRatn5VTWDY
+ Project-URL: Reference_Citation_For_Use, https://doi.org/10.1101/2024.07.29.605633
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: Other/Proprietary License
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.7
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: numpy
+ Requires-Dist: scipy
+ Requires-Dist: scikit-image
+ Requires-Dist: Pillow
+ Requires-Dist: matplotlib
+ Requires-Dist: networkx
+ Requires-Dist: opencv-python-headless
+ Requires-Dist: openpyxl
+ Requires-Dist: pandas
+ Requires-Dist: tifffile
+ Requires-Dist: qtrangeslider
+ Requires-Dist: PyQt6
+ Requires-Dist: scikit-learn
+ Requires-Dist: nibabel
+ Requires-Dist: setuptools
+ Requires-Dist: umap-learn
+ Provides-Extra: cuda11
+ Requires-Dist: cupy-cuda11x; extra == "cuda11"
+ Provides-Extra: cuda12
+ Requires-Dist: cupy-cuda12x; extra == "cuda12"
+ Provides-Extra: cupy
+ Requires-Dist: cupy; extra == "cupy"
+ Provides-Extra: cellpose
+ Requires-Dist: cellpose[GUI]; extra == "cellpose"
+ Provides-Extra: viz
+ Requires-Dist: napari; extra == "viz"
+ Provides-Extra: all
+ Requires-Dist: cellpose[GUI]; extra == "all"
+ Requires-Dist: napari; extra == "all"
+ Dynamic: license-file
+
+ NetTracer3D is a Python package developed for both 2D and 3D analysis of microscopic images in the .tif file format. It supports generation of 3D networks showing the relationships between objects (or nodes) in three-dimensional space, based either on their own proximity or on connectivity via connecting objects such as nerves or blood vessels. It also includes several advanced 3D data-processing algorithms, such as labeling of branched structures or abstracting branched structures into networks. Note that NetTracer3D works on segmented data, which can be produced in other software such as ImageJ and imported into NetTracer3D, although it also offers its own segmentation via intensity and volumetric thresholding, or random forest machine learning segmentation. NetTracer3D currently has a fully functional GUI. To use the GUI, after installing the nettracer3d package via pip, enter the command 'nettracer3d' in your command prompt.
+
+ --- Documentation ---
+
+ Please see: https://nettracer3d.readthedocs.io/en/latest/
+
+ --- Installation ---
+
+ To install nettracer3d, simply install Python and use this command in your command terminal:
+
+ pip install nettracer3d
+
+ I recommend installing the program as an Anaconda package to ensure its modules work together on your specific system:
+ (Install Anaconda at the link below, set up a new Python env for nettracer3d, then use the same pip command).
+
+ https://www.anaconda.com/download?utm_source=anacondadocs&utm_medium=documentation&utm_campaign=download&utm_content=installwindows
+
+ Optional Packages
+ ~~~~~~~~~~~~~~~~~~
+ I recommend including Napari (Chi-Li Chiu, Nathan Clack, the napari community, napari: a Python Multi-Dimensional Image Viewer Platform for the Research Community, Microscopy and Microanalysis, Volume 28, Issue S1, 1 August 2022, Pages 1576–1577, https://doi.org/10.1017/S1431927622006328) in the download as well, which allows NetTracer3D to use 3D displays. The standard package only comes with its native 2D slice display window.
+ If Napari is present, all 3D images and overlays from NetTracer3D can be easily displayed in 3D with a click of a button. To package with Napari, use this install command instead:
+
+ pip install nettracer3d[viz]
+
+ Additionally, for easy access to high-quality cell segmentation, as of version 0.8.2, NetTracer3D can be optionally packaged with Cellpose3. (Stringer, C., Pachitariu, M. Cellpose3: one-click image restoration for improved cellular segmentation. Nat Methods 22, 592–599 (2025). https://doi.org/10.1038/s41592-025-02595-5)
+ Cellpose3 is not involved with the rest of the program in any way, although its GUI can be opened from NetTracer3D's GUI, provided both are installed in the same environment. It is a top-tier cell segmenter that can assist in the production of cell networks.
+ To include Cellpose3 in the install, use this command:
+
+
+ pip install nettracer3d[cellpose]
+
+ Alternatively, both Napari and Cellpose can be included in the package with this command (or they can be installed independently with pip from the base package env):
+
+
+ pip install nettracer3d[all]
+
+ GPU
+ ~~~~~~~~~~~~~~~~~~
+ NetTracer3D is mostly CPU-bound, but a few functions can optionally use the GPU. To install the optional GPU functionalities, you will need an NVIDIA GPU and a CUDA toolkit that works with it; find your GPU's compatible CUDA toolkit and install it with the auto-installer from the NVIDIA website: https://developer.nvidia.com/cuda-toolkit
+
+ With a CUDA toolkit installed, use:
+
+ pip install nettracer3d[CUDA11] #If your CUDA toolkit is version 11
+ pip install nettracer3d[CUDA12] #If your CUDA toolkit is version 12
+ pip install nettracer3d[cupy] #For the generic cupy library (The above two are usually the ones you want)
+
+ Or, if you've already installed the NetTracer3D base package and want just the GPU-associated packages:
+
+ pip install cupy-cuda11x #If your CUDA toolkit is version 11
+ pip install cupy-cuda12x #If your CUDA toolkit is version 12
+ pip install cupy #For the generic cupy library (The above two are usually the ones you want)
+
+ While not related to NetTracer3D itself, if you want to use Cellpose3 (for which GPU usage is somewhat obligatory) to help segment cells for any networks, you will also want to install PyTorch here: https://pytorch.org/. Use the PyTorch build menu on this webpage to find a pip install command that is compatible with Python and your CUDA version.
+
+
+ This GUI is built on the PyQt6 package and therefore may not function in Docker containers or virtual environments that cannot support PyQt6 displays.
+
+
+ For a (slightly outdated) video tutorial on using the GUI: https://www.youtube.com/watch?v=cRatn5VTWDY
+
+ NetTracer3D is free to use/fork for academic/nonprofit use so long as citation is provided, and is available for commercial use at a fee (see license file for information).
+
+ NetTracer3D was developed by Liam McLaughlin while working under Dr. Sanjay Jain at Washington University School of Medicine.
+
+ -- Version 0.8.2 Updates --
+
+ * Bug fixes.
+ * Improved some of the image viewer window features.
+ * New option to zoom in on specific windows by clicking + dragging while in zoom mode.
+ * Added more features to UMAP/community neighborhood clustering (optional DBSCAN clustering; results more robust to node distribution).
+ * Made Napari an optional rather than core dependency.
+ * Added Cellpose as an optional dependency.
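Since the GPU support described above hinges on a working CuPy install, a quick check such as the following (illustrative only, not part of nettracer3d) confirms that one of the CUDA extras actually sees a device:

    try:
        import cupy as cp
        # getDeviceCount() reports how many CUDA devices the runtime can see.
        print("CUDA devices visible to CuPy:", cp.cuda.runtime.getDeviceCount())
    except ImportError:
        print("CuPy is not installed; the GPU-aided options will be unavailable.")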
nettracer3d-0.8.2.dist-info/RECORD ADDED
@@ -0,0 +1,24 @@
+ nettracer3d/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ nettracer3d/cellpose_manager.py,sha256=qZpTxSkmPb38Pru8TmjJ88xxcD_wM02EfJB5Mw9Xx4Y,6021
+ nettracer3d/community_extractor.py,sha256=oa4e0bAwUALDgu_QdBiqNNn1XrmOW3drD5bjwey81BI,25313
+ nettracer3d/excelotron.py,sha256=lS5vnpoOGZWp7fdqVpTPqeC-mUKrfwDrWHfx4PQ7Uzg,71384
+ nettracer3d/modularity.py,sha256=O9OeKbjD3v6gSFz9K2GzP6LsxlpQaPfeJbM1pyIEigw,21788
+ nettracer3d/morphology.py,sha256=jyDjYzrZ4LvI5jOyw8DLsxmo-i5lpqHsejYpW7Tq7Mo,19786
+ nettracer3d/neighborhoods.py,sha256=ac_gjN7pUlXZZpMSZnUVErKbKtSlInxX2dHe22oDNJA,34532
+ nettracer3d/nettracer.py,sha256=L7FGwgiuzIloFBcQmR7UNYGicJMHgHL-etIgogCrRE0,235084
+ nettracer3d/nettracer_gui.py,sha256=QRZEEazgO-8jLjZu29ZDD2MLnC4rObISbZgLFiQxgbE,526421
+ nettracer3d/network_analysis.py,sha256=yUEzy4hBDTuZvXwFuJWdIQcxqPW4z67APe4zcjCjDW8,43613
+ nettracer3d/network_draw.py,sha256=F7fw6Pcf4qWOhdKwLmhwqWdschbDlHzwCVolQC9imeU,14117
+ nettracer3d/node_draw.py,sha256=LoeTFeOcrX6kPquZvCqYnMW-jDd9oqKM27r-rTlKEtY,10274
+ nettracer3d/proximity.py,sha256=mRkug_y6fbqq_pOYTkF5uOoiRhvYv2e_QFC92ZTraYE,38110
+ nettracer3d/run.py,sha256=xYeaAc8FCx8MuzTGyL3NR3mK7WZzffAYAH23bNRZYO4,127
+ nettracer3d/segmenter.py,sha256=VatOSpc41lxhPuYLTTejCxG1CcwP5hwiQ3ZFK9OBavA,60115
+ nettracer3d/segmenter_GPU.py,sha256=sFVmz_cYIVOQqnfFV3peK9hzb6IoIV5WDQHH9Lws96I,53915
+ nettracer3d/simple_network.py,sha256=dkG4jpc4zzdeuoaQobgGfL3PNo6N8dGKQ5hEEubFIvA,9947
+ nettracer3d/smart_dilate.py,sha256=DOEOQq9ig6-AO4MpqAG0CqrGDFqw5_UBeqfSedqHk28,25933
+ nettracer3d-0.8.2.dist-info/licenses/LICENSE,sha256=gM207DhJjWrxLuEWXl0Qz5ISbtWDmADfjHp3yC2XISs,888
+ nettracer3d-0.8.2.dist-info/METADATA,sha256=QlUhREQ7hy0wHfA8nKn6Qu_W-QONoGPSQY5yp9GEBYE,7112
+ nettracer3d-0.8.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ nettracer3d-0.8.2.dist-info/entry_points.txt,sha256=Nx1rr_0QhJXDBHAQg2vcqCzLMKBzSHfwy3xwGkueVyc,53
+ nettracer3d-0.8.2.dist-info/top_level.txt,sha256=zsYy9rZwirfCEOubolhee4TyzqBAL5gSUeFMzhFTX8c,12
+ nettracer3d-0.8.2.dist-info/RECORD,,
nettracer3d-0.8.0.dist-info/METADATA DELETED
@@ -1,83 +0,0 @@
- Metadata-Version: 2.4
- Name: nettracer3d
- Version: 0.8.0
- Summary: Scripts for intializing and analyzing networks from segmentations of three dimensional images.
- Author-email: Liam McLaughlin <liamm@wustl.edu>
- Project-URL: Documentation, https://nettracer3d.readthedocs.io/en/latest/
- Project-URL: Video_Tutorial, https://www.youtube.com/watch?v=cRatn5VTWDY
- Project-URL: Reference_Citation_For_Use, https://doi.org/10.1101/2024.07.29.605633
- Classifier: Programming Language :: Python :: 3
- Classifier: License :: Other/Proprietary License
- Classifier: Operating System :: OS Independent
- Requires-Python: >=3.7
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Requires-Dist: numpy
- Requires-Dist: scipy
- Requires-Dist: scikit-image
- Requires-Dist: Pillow
- Requires-Dist: matplotlib
- Requires-Dist: networkx
- Requires-Dist: opencv-python-headless
- Requires-Dist: openpyxl
- Requires-Dist: pandas
- Requires-Dist: napari
- Requires-Dist: tifffile
- Requires-Dist: qtrangeslider
- Requires-Dist: PyQt6
- Requires-Dist: scikit-learn
- Requires-Dist: nibabel
- Requires-Dist: setuptools
- Requires-Dist: umap-learn
- Provides-Extra: cuda11
- Requires-Dist: cupy-cuda11x; extra == "cuda11"
- Provides-Extra: cuda12
- Requires-Dist: cupy-cuda12x; extra == "cuda12"
- Provides-Extra: cupy
- Requires-Dist: cupy; extra == "cupy"
- Dynamic: license-file
-
- NetTracer3D is a python package developed for both 2D and 3D analysis of microscopic images in the .tif file format. It supports generation of 3D networks showing the relationships between objects (or nodes) in three dimensional space, either based on their own proximity or connectivity via connecting objects such as nerves or blood vessels. In addition to these functionalities are several advanced 3D data processing algorithms, such as labeling of branched structures or abstraction of branched structures into networks. Note that nettracer3d uses segmented data, which can be segmented from other softwares such as ImageJ and imported into NetTracer3D, although it does offer its own segmentation via intensity and volumetric thresholding, or random forest machine learning segmentation. NetTracer3D currently has a fully functional GUI. To use the GUI, after installing the nettracer3d package via pip, enter the command 'nettracer3d' in your command prompt:
-
- --- Documentation ---
-
- Please see: https://nettracer3d.readthedocs.io/en/latest/
-
- --- Installation ---
-
- To install nettracer3d, simply install Python and use this command in your command terminal:
-
- pip install nettracer3d
-
- I recommend installing the program as an Anaconda package to ensure its modules are work together on your specific system:
- (Install anaconda at the link below, set up a new python env for nettracer3d, then use the same pip command).
-
- https://www.anaconda.com/download?utm_source=anacondadocs&utm_medium=documentation&utm_campaign=download&utm_content=installwindows
-
- nettracer3d mostly utilizes the CPU for processing and visualization, although it does have a few GPU-aided options. If you would like to use the GPU for these, you will need an NVIDIA GPU and a corresponding CUDA toolkit which can be installed here:
- https://developer.nvidia.com/cuda-toolkit
-
- To install nettracer3d with associated GPU-supporting packages, please use:
-
- If your CUDA toolkit is version 11: pip install nettracer3d[CUDA11]
- If your CUDA toolkit is version 12: pip install nettracer3d[CUDA12]
- If you just want the entire cupy library: pip install nettracer3d[cupy]
-
-
- This gui is built from the PyQt6 package and therefore may not function on dockers or virtual envs that are unable to support PyQt6 displays.
-
-
- For a (slightly outdated) video tutorial on using the GUI: https://www.youtube.com/watch?v=cRatn5VTWDY
-
- NetTracer3D is free to use/fork for academic/nonprofit use so long as citation is provided, and is available for commercial use at a fee (see license file for information).
-
- NetTracer3D was developed by Liam McLaughlin while working under Dr. Sanjay Jain at Washington University School of Medicine.
-
- -- Version 0.8.0 Updates --
-
- * Added ability to threshold nodes by degree.
- * Improved image viewer window performance.
- * Bug fixes and a few optimizations.
- * Added ability to 'merge node identities' which just uses the nodes image as a reference for collecting 'identity' information from a group of other images - ie can use with cell nuclei (DAPI) to see what markers from the same imaging session overlap.
- * Added ability to search for specific nodes directly in the nodes image with 'shift + f' or right click.
-
nettracer3d-0.8.0.dist-info/RECORD DELETED
@@ -1,23 +0,0 @@
- nettracer3d/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- nettracer3d/community_extractor.py,sha256=ZOz97Au4k7dHK8azarWQtxCPyvwzGmDHNL8kTIC9by8,22670
- nettracer3d/excelotron.py,sha256=lS5vnpoOGZWp7fdqVpTPqeC-mUKrfwDrWHfx4PQ7Uzg,71384
- nettracer3d/modularity.py,sha256=O9OeKbjD3v6gSFz9K2GzP6LsxlpQaPfeJbM1pyIEigw,21788
- nettracer3d/morphology.py,sha256=jyDjYzrZ4LvI5jOyw8DLsxmo-i5lpqHsejYpW7Tq7Mo,19786
- nettracer3d/neighborhoods.py,sha256=kkKR8m6Gjw34cDd_mytAIwLxqvuNBtQb2hU4JuBY9pI,12301
- nettracer3d/nettracer.py,sha256=494nHDmdrdfecTAShbXc0eFE2tG6WKJtEqKvJyt4sh4,227141
- nettracer3d/nettracer_gui.py,sha256=Pc_t5pDk0P4gpyC1axx37JJX6EpMvXdj0UHbqjqENrQ,501757
- nettracer3d/network_analysis.py,sha256=h-5yzUWdE0hcWYy8wcBA5LV1bRhdqiMnKbQLrRzb1Sw,41443
- nettracer3d/network_draw.py,sha256=F7fw6Pcf4qWOhdKwLmhwqWdschbDlHzwCVolQC9imeU,14117
- nettracer3d/node_draw.py,sha256=k3sCTfUCJs3aH1C1q1gTNxDz9EAQbBd1hsUIJajxRx8,9823
- nettracer3d/proximity.py,sha256=hPmTPFGUziPMVwfWRLVV9gUjqSL7nzLD6WAVLekvxbE,28545
- nettracer3d/run.py,sha256=xYeaAc8FCx8MuzTGyL3NR3mK7WZzffAYAH23bNRZYO4,127
- nettracer3d/segmenter.py,sha256=VatOSpc41lxhPuYLTTejCxG1CcwP5hwiQ3ZFK9OBavA,60115
- nettracer3d/segmenter_GPU.py,sha256=sFVmz_cYIVOQqnfFV3peK9hzb6IoIV5WDQHH9Lws96I,53915
- nettracer3d/simple_network.py,sha256=dkG4jpc4zzdeuoaQobgGfL3PNo6N8dGKQ5hEEubFIvA,9947
- nettracer3d/smart_dilate.py,sha256=DOEOQq9ig6-AO4MpqAG0CqrGDFqw5_UBeqfSedqHk28,25933
- nettracer3d-0.8.0.dist-info/licenses/LICENSE,sha256=gM207DhJjWrxLuEWXl0Qz5ISbtWDmADfjHp3yC2XISs,888
- nettracer3d-0.8.0.dist-info/METADATA,sha256=2bK3auAT6Xr7y_VDBaZE_cB4dgShYtF0qrIzcuTDflU,4605
- nettracer3d-0.8.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- nettracer3d-0.8.0.dist-info/entry_points.txt,sha256=Nx1rr_0QhJXDBHAQg2vcqCzLMKBzSHfwy3xwGkueVyc,53
- nettracer3d-0.8.0.dist-info/top_level.txt,sha256=zsYy9rZwirfCEOubolhee4TyzqBAL5gSUeFMzhFTX8c,12
- nettracer3d-0.8.0.dist-info/RECORD,,