nettracer3d 0.8.3__py3-none-any.whl → 0.8.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of nettracer3d might be problematic.

@@ -136,7 +136,6 @@ def open_network(excel_file_path):
         G.add_edge(nodes_a[i], nodes_b[i])
 
     return G
-
 def read_excel_to_lists(file_path, sheet_name=0):
     """Convert a pd dataframe to lists. Handles both .xlsx and .csv files"""
     def load_json_to_list(filename):
@@ -157,6 +156,61 @@ def read_excel_to_lists(file_path, sheet_name=0):
                 converted_data[k] = v
 
         return converted_data
+
+    if type(file_path) == str:
+        # Check file extension
+        if file_path.lower().endswith('.xlsx'):
+            # Read the Excel file with headers (since your new save method includes them)
+            df = pd.read_excel(file_path, sheet_name=sheet_name)
+        elif file_path.lower().endswith('.csv'):
+            # Read the CSV file with headers and specify dtype to avoid the warning
+            df = pd.read_csv(file_path, dtype=str, low_memory=False)
+        elif file_path.lower().endswith('.json'):
+            df = load_json_to_list(file_path)
+            return df
+        else:
+            raise ValueError("File must be either .xlsx, .csv, or .json format")
+    else:
+        df = file_path
+
+    # Initialize an empty list to store the lists of values
+    data_lists = []
+    # Iterate over each column in the DataFrame
+    for column_name, column_data in df.items():
+        # Convert the column values to a list and append to the data_lists
+        data_lists.append(column_data.tolist())
+
+    master_list = [[], [], []]
+    for i in range(0, len(data_lists), 3):
+        master_list[0].extend([int(x) for x in data_lists[i]])
+        master_list[1].extend([int(x) for x in data_lists[i+1]])
+        try:
+            master_list[2].extend([int(x) for x in data_lists[i+2]])
+        except IndexError:
+            master_list[2].extend([0]) # Note: Changed to list with single int 0
+
+    return master_list
+
+def read_excel_to_lists_old(file_path, sheet_name=0):
+    """Convert a pd dataframe to lists. Handles both .xlsx and .csv files"""
+    def load_json_to_list(filename):
+        with open(filename, 'r') as f:
+            data = json.load(f)
+
+        # Convert only numeric strings to integers, leave other strings as is
+        converted_data = [[],[],[]]
+        for i in data[0]:
+            try:
+                converted_data[0].append(int(data[0][i]))
+                converted_data[1].append(int(data[1][i]))
+                try:
+                    converted_data[2].append(int(data[2][i]))
+                except IndexError:
+                    converted_data[2].append(0)
+            except ValueError:
+                converted_data[k] = v
+
+        return converted_data
 
     if type(file_path) == str:
         # Check file extension
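
Note: the rewritten `read_excel_to_lists` dispatches on file extension and then flattens the DataFrame's columns into three parallel integer lists, consuming columns in groups of three. A minimal sketch of that flattening step, assuming hypothetical column names and a node-A/node-B/edge triplet layout (not taken from the package's sheets):

import pandas as pd

# Hypothetical column triplet; the real sheets may use different headers.
df = pd.DataFrame({"Node A": ["1", "2"], "Node B": ["3", "4"], "Edge": ["10", "11"]})

data_lists = [column.tolist() for _, column in df.items()]
master_list = [[], [], []]
for i in range(0, len(data_lists), 3):
    master_list[0].extend([int(x) for x in data_lists[i]])
    master_list[1].extend([int(x) for x in data_lists[i + 1]])
    try:
        master_list[2].extend([int(x) for x in data_lists[i + 2]])
    except IndexError:
        master_list[2].extend([0])  # pad when no third column exists

print(master_list)  # [[1, 2], [3, 4], [10, 11]]
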
@@ -545,42 +599,51 @@ def _find_centroids_old(nodes, node_list = None, down_factor = None):
 
     return centroid_dict
 
+
 def _find_centroids(nodes, node_list=None, down_factor=None):
     """Internal use version to get centroids without saving"""
-    def get_label_indices(binary_stack, label, y_offset):
-        """
-        Finds indices of labelled object in array and adjusts for the Y-offset.
-        """
-        indices = np.argwhere(binary_stack == label)
-        # Adjust the Y coordinate by the y_offset
-        indices[:, 1] += y_offset
-        return indices
+
 
     def compute_indices_in_chunk(chunk, y_offset):
         """
-        Get indices for all labels in a given chunk of the 3D array.
-        Adjust Y-coordinate based on the y_offset for each chunk.
+        Alternative approach using np.where for even better performance on sparse arrays.
         """
         indices_dict_chunk = {}
-        label_list = np.unique(chunk)
-        try:
-            if label_list[0] == 0:
-                label_list = np.delete(label_list, 0)
-        except:
-            pass
 
-        for label in label_list:
-            indices = get_label_indices(chunk, label, y_offset)
-            indices_dict_chunk[label] = indices
+        # Get all coordinates where chunk is non-zero
+        z_coords, y_coords, x_coords = np.where(chunk != 0)
+
+        if len(z_coords) == 0:
+            return indices_dict_chunk
+
+        # Adjust Y coordinates
+        y_coords_adjusted = y_coords + y_offset
+
+        # Get labels at these coordinates
+        labels = chunk[z_coords, y_coords, x_coords]
+
+        # Group by unique labels
+        unique_labels = np.unique(labels)
+
+        for label in unique_labels:
+            if label == 0: # Skip background
+                continue
+            mask = (labels == label)
+            # Stack coordinates into the expected format [z, y, x]
+            indices_dict_chunk[label] = np.column_stack((
+                z_coords[mask],
+                y_coords_adjusted[mask],
+                x_coords[mask]
+            ))
+
         return indices_dict_chunk
 
     def chunk_3d_array(array, num_chunks):
-        """
-        Split the 3D array into smaller chunks along the y-axis.
-        """
+        """Split the 3D array into smaller chunks along the y-axis."""
         y_slices = np.array_split(array, num_chunks, axis=1)
         return y_slices
 
+    # Handle input processing
     if isinstance(nodes, str): # Open into numpy array if filepath
         nodes = tifffile.imread(nodes)
     if len(np.unique(nodes)) == 2: # Label if binary
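
Note: the deleted helper ran one `np.argwhere` pass per label, while the replacement scans the chunk once with `np.where` and groups coordinates by label afterward. A standalone sketch of that single-pass idea (toy array and offset, invented for illustration):

import numpy as np

chunk = np.zeros((2, 4, 4), dtype=int)
chunk[0, 1, 1] = 5
chunk[1, 2, 3] = 7
y_offset = 4  # pretend this chunk starts at global y == 4

z, y, x = np.where(chunk != 0)          # one pass over the whole chunk
labels = chunk[z, y, x]                 # label value at each foreground voxel
indices = {}
for label in np.unique(labels):
    if label == 0:                      # skip background
        continue
    m = labels == label
    indices[label] = np.column_stack((z[m], y[m] + y_offset, x[m]))

print(indices[5])  # [[0 5 1]] -- global coordinates after the y offset
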
@@ -595,14 +658,14 @@ def _find_centroids(nodes, node_list=None, down_factor=None):
     indices_dict = {}
     num_cpus = mp.cpu_count()
 
-    # Chunk the 3D array along the y-axis into smaller subarrays
+    # Chunk the 3D array along the y-axis
     node_chunks = chunk_3d_array(nodes, num_cpus)
 
     # Calculate Y offset for each chunk
     chunk_sizes = [chunk.shape[1] for chunk in node_chunks]
     y_offsets = np.cumsum([0] + chunk_sizes[:-1])
 
-    # Parallel computation of indices across chunks
+    # Parallel computation using the optimized single-pass approach
     with ThreadPoolExecutor(max_workers=num_cpus) as executor:
         futures = {executor.submit(compute_indices_in_chunk, chunk, y_offset): chunk_id
                    for chunk_id, (chunk, y_offset) in enumerate(zip(node_chunks, y_offsets))}
@@ -622,10 +685,8 @@ def _find_centroids(nodes, node_list=None, down_factor=None):
         centroid = np.round(np.mean(indices, axis=0)).astype(int)
         centroid_dict[label] = centroid
 
-    try:
-        del centroid_dict[0]
-    except:
-        pass
+    # Remove background label if it exists
+    centroid_dict.pop(0, None)
 
     return centroid_dict
 
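Note: the `try`/`del`/`except` dance becomes a single `dict.pop` with a default, which removes the key when present and is a no-op otherwise. For example:

centroid_dict = {0: "background", 3: "node"}
centroid_dict.pop(0, None)  # key 0 removed
centroid_dict.pop(0, None)  # already gone: no KeyError raised
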
nettracer3d/node_draw.py CHANGED
@@ -200,8 +200,12 @@ def degree_draw(degree_dict, centroid_dict, nodes):
 
     return draw_array
 
-def degree_infect(degree_dict, nodes):
-    return_nodes = np.zeros_like(nodes) # Start with all zeros
+def degree_infect(degree_dict, nodes, make_floats = False):
+
+    if not make_floats:
+        return_nodes = np.zeros_like(nodes) # Start with all zeros
+    else:
+        return_nodes = np.zeros(nodes.shape, dtype=np.float32)
 
     if not degree_dict: # Handle empty dict
         return return_nodes
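
Note: `degree_infect` gains a `make_floats` flag that swaps the integer `zeros_like` buffer for a float32 one. The distinction matters because assigning fractional values into an integer array silently truncates them, as this toy sketch (values assumed, not from the package) shows:

import numpy as np

nodes = np.array([[1, 1], [2, 0]], dtype=np.int32)
int_buf = np.zeros_like(nodes)                       # old behavior: int32 buffer
float_buf = np.zeros(nodes.shape, dtype=np.float32)  # make_floats=True path
int_buf[nodes == 1] = 0.5                            # silently truncated to 0
float_buf[nodes == 1] = 0.5                          # kept as 0.5
print(int_buf.max(), float_buf.max())                # 0 0.5
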
nettracer3d/proximity.py CHANGED
@@ -144,7 +144,7 @@ def find_shared_value_pairs(input_dict):
 
 #Related to kdtree centroid searching:
 
-def populate_array(centroids, clip=False):
+def populate_array(centroids, clip=False, shape = None):
     """
     Create a 3D array from centroid coordinates.
 
@@ -164,12 +164,16 @@ def populate_array(centroids, clip=False):
     coords = np.array(list(centroids.values()))
     # Round coordinates to nearest integer
     coords = np.round(coords).astype(int)
-    min_coords = coords.min(axis=0)
-    max_coords = coords.max(axis=0)
+    if shape is None:
+        min_coords = coords.min(axis=0)
+        max_coords = coords.max(axis=0)
+    else:
+        min_coords = [0, 0, 0]
+        max_coords = shape
 
     # Check for negative coordinates only if not clipping
-    if not clip and np.any(min_coords < 0):
-        raise ValueError("Negative coordinates found in centroids")
+    #if not clip and np.any(min_coords < 0):
+        #raise ValueError("Negative coordinates found in centroids")
 
     # Apply clipping if requested
     clipped_centroids = {}
@@ -183,10 +187,15 @@
     for i, obj_id in enumerate(centroids.keys()):
         clipped_centroids[obj_id] = coords[i].tolist()
 
-    # Create array
-    array = np.zeros((max_coords[0] + 1,
-                      max_coords[1] + 1,
-                      max_coords[2] + 1), dtype=int)
+    if shape is None:
+        # Create array
+        array = np.zeros((max_coords[0] + 1,
+                          max_coords[1] + 1,
+                          max_coords[2] + 1), dtype=int)
+    else:
+        array = np.zeros((max_coords[0],
+                          max_coords[1],
+                          max_coords[2]), dtype=int)
 
     # Populate array with (possibly clipped) rounded coordinates
     for i, (obj_id, coord) in enumerate(centroids.items()):
@@ -194,7 +203,10 @@
             z, y, x = coords[i] # Use pre-computed clipped coordinates
         else:
             z, y, x = np.round([coord[0], coord[1], coord[2]]).astype(int)
-        array[z, y, x] = obj_id
+        try:
+            array[z, y, x] = obj_id
+        except:
+            pass
 
     if clip:
         return array, clipped_centroids
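
Note: with the new `shape` argument, `populate_array` writes into a fixed-size array instead of sizing it from the centroid extremes, and the new `try`/`except` drops any centroid that falls outside it. A sketch of that behavior under assumed inputs (caveat: a negative index would wrap rather than raise, so it would not be caught here):

import numpy as np

centroids = {1: [0, 2, 2], 2: [0, 9, 9]}  # the second point is out of bounds
array = np.zeros((1, 5, 5), dtype=int)    # fixed size, as with shape=(1, 5, 5)
for obj_id, (z, y, x) in centroids.items():
    try:
        array[z, y, x] = obj_id
    except IndexError:
        pass                              # out-of-bounds centroid silently skipped
print(int(array[0, 2, 2]), int(array.max()))  # 1 1
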
@@ -328,7 +340,7 @@ def find_neighbors_kdtree(radius, centroids=None, array=None, targets=None, n_jo
 
     if targets is None:
         # Original behavior: find neighbors for all points
-        query_points = points
+        query_points = np.array(points)
         query_indices = list(range(len(points)))
     else:
         # Convert targets to set for O(1) lookup
@@ -354,7 +366,6 @@ def find_neighbors_kdtree(radius, centroids=None, array=None, targets=None, n_jo
         # Convert to numpy array for querying
         query_points = np.array(target_points)
         query_indices = target_indices
-
 
     # Handle case where no target values were found
     if len(query_points) == 0:
@@ -373,6 +384,7 @@ def find_neighbors_kdtree(radius, centroids=None, array=None, targets=None, n_jo
 
     # Skip parallelization for small datasets or when n_jobs=1
     if n_jobs == 1 or len(neighbor_indices) < 100:
+    #if True:
         # Sequential processing (original logic with max_neighbors support)
         output = []
         for i, neighbors in enumerate(neighbor_indices):
@@ -599,10 +611,10 @@ def convert_centroids_to_array(centroids_list, xy_scale = 1, z_scale = 1):
     points_array[:, 1:] = points_array[:, 1:] * xy_scale #account for scaling
 
     points_array[:, 0] = points_array[:, 0] * z_scale #account for scaling
-
+
     return points_array
 
-def generate_r_values(points_array, step_size, bounds = None, dim = 2, max_proportion=0.5):
+def generate_r_values(points_array, step_size, bounds = None, dim = 2, max_proportion=0.5, max_r = None):
     """
     Generate an array of r values based on point distribution and step size.
 
@@ -634,18 +646,27 @@ def generate_r_values(points_array, step_size, bounds = None, dim = 2, max_propo
     min_coords = np.array([0,0,0])
     dimensions = max_coords - min_coords
 
-    max_dimension = np.max(dimensions)
+    if 1 in dimensions:
+        dimensions = np.delete(dimensions, 0) #Presuming 2D data
+
+    min_dimension = np.min(dimensions) #Biased for smaller dimension now for safety
 
     # Calculate maximum r value (typically half the shortest side for 2D,
     # or scaled by max_proportion for general use)
-    max_r = max_dimension * max_proportion
+    if max_r is None:
+        max_r = min_dimension * max_proportion
+        if max_proportion < 1:
+            print(f"Omitting search radii beyond {max_r}")
+        else:
+            print(f"Omitting search radii beyond {max_r} (to keep analysis within the mask)")
+
 
     # Generate r values from 0 to max_r with step_size increments
     num_steps = int(max_r / step_size)
     r_values = np.linspace(step_size, max_r, num_steps)
 
     if r_values[0] == 0:
-        np.delete(r_values, 0)
+        r_values = np.delete(r_values, 0)
 
     return r_values
 
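Note: `generate_r_values` now caps the radius schedule by the smallest bounding dimension (after dropping a singleton axis for pseudo-2D stacks) rather than the largest, and lets the caller override `max_r`. Also note the `np.delete` fix: it returns a new array, so the old unassigned call did nothing. A NumPy-only sketch of the revised schedule, with made-up dimensions:

import numpy as np

dimensions = np.array([1, 200, 120])       # z-depth of 1: pseudo-2D data
if 1 in dimensions:
    dimensions = np.delete(dimensions, 0)  # drop the singleton first axis
min_dimension = np.min(dimensions)         # 120: the safer, smaller bound

step_size, max_proportion = 10, 0.5
max_r = min_dimension * max_proportion     # 60.0
num_steps = int(max_r / step_size)
r_values = np.linspace(step_size, max_r, num_steps)
print(r_values)                            # [10. 20. 30. 40. 50. 60.]
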
@@ -662,7 +683,7 @@ def convert_augmented_array_to_points(augmented_array):
     # Extract just the coordinate columns (all except first column)
     return augmented_array[:, 1:]
 
-def optimized_ripleys_k(reference_points, subset_points, r_values, bounds=None, edge_correction=True, dim = 2, is_subset = False):
+def optimized_ripleys_k(reference_points, subset_points, r_values, bounds=None, dim = 2, is_subset = False, volume = None, n_subset = None):
     """
     Optimized computation of Ripley's K function using KD-Tree with simplified but effective edge correction.
 
@@ -677,7 +698,8 @@ def optimized_ripleys_k(reference_points, subset_points, r_values, bounds=None,
         K_values: numpy array of K values corresponding to r_values
     """
     n_ref = len(reference_points)
-    n_subset = len(subset_points)
+    if n_subset is None:
+        n_subset = len(subset_points)
 
     # Determine bounds if not provided
     if bounds is None:
@@ -688,10 +710,12 @@ def optimized_ripleys_k(reference_points, subset_points, r_values, bounds=None,
     # Calculate volume of study area
     min_bounds, max_bounds = bounds
     sides = max_bounds - min_bounds
-    if dim == 2:
-        volume = sides[0] * sides[1]
-    else:
-        volume = np.prod(sides)
+
+    if volume is None:
+        if dim == 2:
+            volume = sides[0] * sides[1]
+        else:
+            volume = np.prod(sides)
 
     # Point intensity (points per unit volume)
     intensity = n_ref / volume
@@ -705,94 +729,19 @@ def optimized_ripleys_k(reference_points, subset_points, r_values, bounds=None,
     # For each r value, compute cumulative counts
     for i, r in enumerate(r_values):
         total_count = 0
-
+
         # Query the tree for all points within radius r of each subset point
         for j, point in enumerate(subset_points):
            # Find all reference points within radius r
            indices = tree.query_ball_point(point, r)
            count = len(indices)
-
-            # Apply edge correction if needed
-            if edge_correction:
-                # Calculate edge correction weight
-                weight = 1.0
-
-                if dim == 2:
-                    # For 2D - check all four boundaries
-                    x, y = point
-
-                    # Distances to all boundaries
-                    x_min_dist = x - min_bounds[0]
-                    x_max_dist = max_bounds[0] - x
-                    y_min_dist = y - min_bounds[1]
-                    y_max_dist = max_bounds[1] - y
-
-                    proportion_in = 1.0
-                    # Apply correction for each boundary if needed
-                    if x_min_dist < r:
-                        proportion_in -= 0.5 * (1 - x_min_dist/r)
-                    if x_max_dist < r:
-                        proportion_in -= 0.5 * (1 - x_max_dist/r)
-                    if y_min_dist < r:
-                        proportion_in -= 0.5 * (1 - y_min_dist/r)
-                    if y_max_dist < r:
-                        proportion_in -= 0.5 * (1 - y_max_dist/r)
-
-                    # Corner correction
-                    if ((x_min_dist < r and y_min_dist < r) or
-                        (x_min_dist < r and y_max_dist < r) or
-                        (x_max_dist < r and y_min_dist < r) or
-                        (x_max_dist < r and y_max_dist < r)):
-                        proportion_in += 0.1 # Add a small boost for corners
-
-                elif dim == 3:
-                    # For 3D - check all six boundaries
-                    x, y, z = point
-
-                    # Distances to all boundaries
-                    x_min_dist = x - min_bounds[0]
-                    x_max_dist = max_bounds[0] - x
-                    y_min_dist = y - min_bounds[1]
-                    y_max_dist = max_bounds[1] - y
-                    z_min_dist = z - min_bounds[2]
-                    z_max_dist = max_bounds[2] - z
-
-                    proportion_in = 1.0
-                    # Apply correction for each boundary if needed
-                    if x_min_dist < r:
-                        proportion_in -= 0.25 * (1 - x_min_dist/r)
-                    if x_max_dist < r:
-                        proportion_in -= 0.25 * (1 - x_max_dist/r)
-                    if y_min_dist < r:
-                        proportion_in -= 0.25 * (1 - y_min_dist/r)
-                    if y_max_dist < r:
-                        proportion_in -= 0.25 * (1 - y_max_dist/r)
-                    if z_min_dist < r:
-                        proportion_in -= 0.25 * (1 - z_min_dist/r)
-                    if z_max_dist < r:
-                        proportion_in -= 0.25 * (1 - z_max_dist/r)
 
-                    # Corner correction for 3D (if point is near a corner)
-                    num_close_edges = (
-                        (x_min_dist < r) + (x_max_dist < r) +
-                        (y_min_dist < r) + (y_max_dist < r) +
-                        (z_min_dist < r) + (z_max_dist < r)
-                    )
-                    if num_close_edges >= 2:
-                        proportion_in += 0.05 * num_close_edges # Stronger boost for more edges
-
-                # Ensure proportion_in stays within reasonable bounds
-                proportion_in = max(0.1, min(1.0, proportion_in))
-                weight = 1.0 / proportion_in
-
-                count *= weight
-
            total_count += count
-
+
        # Subtract self-counts if points appear in both sets
        if is_subset or np.array_equal(reference_points, subset_points):
            total_count -= n_ref # Subtract all self-counts
-
+
        # Normalize
        K_values[i] = total_count / (n_subset * intensity)
 
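Note: with the heuristic edge-correction weights deleted, what remains is the bare estimator K(r) = total_count / (n_subset * intensity) with intensity = n_ref / volume. A quick sanity sketch against complete spatial randomness, where K(r) should sit near pi*r^2 in 2D (points and window invented for the example; with no edge correction the estimate runs somewhat low):

import numpy as np
from scipy.spatial import cKDTree

rng = np.random.default_rng(0)
points = rng.uniform(0, 100, size=(500, 2))   # CSR points in a 100 x 100 window
tree = cKDTree(points)

r = 10.0
total_count = sum(len(tree.query_ball_point(p, r)) for p in points)
total_count -= len(points)                    # remove self-counts, as in the source
intensity = len(points) / (100 * 100)
K = total_count / (len(points) * intensity)
print(round(K, 1), round(np.pi * r**2, 1))    # K close to (a bit under) ~314.2
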
@@ -223,6 +223,7 @@ def dilate_3D_dt(array, search_distance, xy_scaling=1.0, z_scaling=1.0, GPU = Fa
             inv, indices = compute_distance_transform_GPU(inv, return_dists = True, sampling = [z_scaling, xy_scaling, xy_scaling])
         except:
             print("Failed, attempting on CPU...")
+            cleanup()
             #Who would have seen this coming?:
             inv, indices = compute_distance_transform(inv, return_dists = True, sampling = [z_scaling, xy_scaling, xy_scaling])
     else:
@@ -251,15 +252,37 @@ def process_chunk(start_idx, end_idx, nodes, ring_mask, nearest_label_indices):
     nodes_chunk = nodes[:, start_idx:end_idx, :]
     ring_mask_chunk = ring_mask[:, start_idx:end_idx, :]
     dilated_nodes_with_labels_chunk = np.copy(nodes_chunk)
+
+    # Get all ring indices at once
     ring_indices = np.argwhere(ring_mask_chunk)
-
-    for index in ring_indices:
-        z, y, x = index
-        nearest_z, nearest_y, nearest_x = nearest_label_indices[:, z, y + start_idx, x]
-        try: #There was an index error here once on the highest val of the second axis. I could not understand why because it usually doesnt hence the try block.
-            dilated_nodes_with_labels_chunk[z, y, x] = nodes[nearest_z, nearest_y, nearest_x]
-        except:
-            pass
+
+    if len(ring_indices) > 0:
+        # Extract coordinates
+        z_coords = ring_indices[:, 0]
+        y_coords = ring_indices[:, 1]
+        x_coords = ring_indices[:, 2]
+
+        # Get nearest label coordinates (adjust y for chunk offset)
+        nearest_coords = nearest_label_indices[:, z_coords, y_coords + start_idx, x_coords]
+        nearest_z = nearest_coords[0, :]
+        nearest_y = nearest_coords[1, :]
+        nearest_x = nearest_coords[2, :]
+
+        # Vectorized assignment
+        try:
+            dilated_nodes_with_labels_chunk[z_coords, y_coords, x_coords] = \
+                nodes[nearest_z, nearest_y, nearest_x]
+        except IndexError:
+            # Fallback for any problematic indices
+            valid_mask = (nearest_z < nodes.shape[0]) & \
+                         (nearest_y < nodes.shape[1]) & \
+                         (nearest_x < nodes.shape[2]) & \
+                         (nearest_z >= 0) & (nearest_y >= 0) & (nearest_x >= 0)
+
+            valid_indices = valid_mask.nonzero()[0]
+            if len(valid_indices) > 0:
+                dilated_nodes_with_labels_chunk[z_coords[valid_indices], y_coords[valid_indices], x_coords[valid_indices]] = \
+                    nodes[nearest_z[valid_indices], nearest_y[valid_indices], nearest_x[valid_indices]]
 
     return dilated_nodes_with_labels_chunk
 
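Note: the per-voxel Python loop in `process_chunk` becomes a single fancy-indexed assignment over every ring voxel at once, with the old blanket `try`/`except: pass` narrowed to an `IndexError` fallback that masks out only the invalid coordinates. The core idea, on a toy volume (shapes and labels invented):

import numpy as np

nodes = np.arange(27).reshape(3, 3, 3)  # stand-in labeled volume
out = np.zeros_like(nodes)
ring = np.zeros((3, 3, 3), dtype=bool)
ring[0, 1, 2] = ring[2, 0, 0] = True    # two voxels to fill

idx = np.argwhere(ring)                 # (N, 3) array of z, y, x coordinates
z, y, x = idx[:, 0], idx[:, 1], idx[:, 2]
out[z, y, x] = nodes[z, y, x]           # all N voxels assigned in one operation
print(out[0, 1, 2], out[2, 0, 0])       # 5 18
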
@@ -506,6 +529,7 @@ def compute_distance_transform_GPU(nodes, return_dists = False, sampling = [1, 1
 
 
 def compute_distance_transform(nodes, return_dists = False, sampling = [1, 1, 1]):
+    print("(Now doing distance transform...)")
     is_pseudo_3d = nodes.shape[0] == 1
     if is_pseudo_3d:
         nodes = np.squeeze(nodes) # Convert to 2D for processing
@@ -548,13 +572,15 @@ def compute_distance_transform_distance_GPU(nodes, sampling = [1, 1, 1]):
     distance = cp.asnumpy(distance)
 
     if is_pseudo_3d:
-        np.expand_dims(distance, axis = 0)
+        distance = np.expand_dims(distance, axis = 0)
 
     return distance
 
 
 def compute_distance_transform_distance(nodes, sampling = [1, 1, 1]):
 
+    print("(Now doing distance transform...)")
+
     is_pseudo_3d = nodes.shape[0] == 1
     if is_pseudo_3d:
         nodes = np.squeeze(nodes) # Convert to 2D for processing
@@ -563,7 +589,7 @@ def compute_distance_transform_distance(nodes, sampling = [1, 1, 1]):
     # Fallback to CPU if there's an issue with GPU computation
     distance = distance_transform_edt(nodes, sampling = sampling)
     if is_pseudo_3d:
-        np.expand_dims(distance, axis = 0)
+        distance = np.expand_dims(distance, axis = 0)
     return distance
 
 
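Note: both `expand_dims` hunks fix the same bug: `np.expand_dims` returns a new array instead of modifying its argument, so the unassigned call in 0.8.3 was a no-op and pseudo-3D results came back 2D. For example:

import numpy as np

distance = np.ones((4, 4))
np.expand_dims(distance, axis=0)             # 0.8.3: return value discarded
print(distance.shape)                        # still (4, 4)
distance = np.expand_dims(distance, axis=0)  # 0.8.4: result reassigned
print(distance.shape)                        # (1, 4, 4)
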
@@ -621,6 +647,14 @@ def catch_memory(e):
     downsample_needed = (memory_required/total_memory)
     return (downsample_needed)
 
+def cleanup():
+
+    try:
+        cp.get_default_memory_pool().free_all_blocks()
+    except:
+        pass
+
+
 
 if __name__ == "__main__":
     nodes = input("Labelled Nodes tiff?: ")
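
Note: the new `cleanup` helper frees CuPy's default memory pool so that VRAM cached by a failed GPU attempt is released before the CPU fallback runs (see the `dilate_3D_dt` hunk above). A guarded sketch of the same idea; the pinned-pool call is an addition here, not part of the package:

try:
    import cupy as cp
    cp.get_default_memory_pool().free_all_blocks()         # release cached device memory
    cp.get_default_pinned_memory_pool().free_all_blocks()  # also drop pinned host memory
except Exception:
    pass  # no cupy / no GPU available: nothing to release
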
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nettracer3d
-Version: 0.8.3
+Version: 0.8.4
 Summary: Scripts for intializing and analyzing networks from segmentations of three dimensional images.
 Author-email: Liam McLaughlin <liamm@wustl.edu>
 Project-URL: Documentation, https://nettracer3d.readthedocs.io/en/latest/
@@ -107,6 +107,6 @@ NetTracer3D is free to use/fork for academic/nonprofit use so long as citation i
 
 NetTracer3D was developed by Liam McLaughlin while working under Dr. Sanjay Jain at Washington University School of Medicine.
 
--- Version 0.8.3 Updates --
+-- Version 0.8.4 Updates --
 
-* Added better color legend display
+* See Documentation Once Updated
@@ -0,0 +1,24 @@
+nettracer3d/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nettracer3d/cellpose_manager.py,sha256=qZpTxSkmPb38Pru8TmjJ88xxcD_wM02EfJB5Mw9Xx4Y,6021
+nettracer3d/community_extractor.py,sha256=3lcWy64znl6xhl7axhWwdOk3N0dmpSep0259kLXMp9s,28135
+nettracer3d/excelotron.py,sha256=X9v_mte8gJBPNGdj6NJNUYja0Z6eorVoKAFx4nHiMnU,72058
+nettracer3d/modularity.py,sha256=O9OeKbjD3v6gSFz9K2GzP6LsxlpQaPfeJbM1pyIEigw,21788
+nettracer3d/morphology.py,sha256=jyDjYzrZ4LvI5jOyw8DLsxmo-i5lpqHsejYpW7Tq7Mo,19786
+nettracer3d/neighborhoods.py,sha256=ac_gjN7pUlXZZpMSZnUVErKbKtSlInxX2dHe22oDNJA,34532
+nettracer3d/nettracer.py,sha256=zHIUHLfWY3XAQBhcaZmKUqJ6UPCxk0A09cQBAnOK5TE,253362
+nettracer3d/nettracer_gui.py,sha256=QsHMiTW62Hl0jHOnAjSH_l8TAV7-xQBH7cdFfW8HTM0,547961
+nettracer3d/network_analysis.py,sha256=kBzsVaq4dZkMe0k-VGvQIUvM-tK0ZZ8bvb-wtsugZRQ,46150
+nettracer3d/network_draw.py,sha256=F7fw6Pcf4qWOhdKwLmhwqWdschbDlHzwCVolQC9imeU,14117
+nettracer3d/node_draw.py,sha256=kZcR1PekLg0riioNeGcALIXQyZ5PtHA_9MT6z7Zovdk,10401
+nettracer3d/proximity.py,sha256=kSc4PwBJswPup72ZFJgS03fgQLUbRSv-MQmGuAPg_0k,35325
+nettracer3d/run.py,sha256=xYeaAc8FCx8MuzTGyL3NR3mK7WZzffAYAH23bNRZYO4,127
+nettracer3d/segmenter.py,sha256=VatOSpc41lxhPuYLTTejCxG1CcwP5hwiQ3ZFK9OBavA,60115
+nettracer3d/segmenter_GPU.py,sha256=sFVmz_cYIVOQqnfFV3peK9hzb6IoIV5WDQHH9Lws96I,53915
+nettracer3d/simple_network.py,sha256=dkG4jpc4zzdeuoaQobgGfL3PNo6N8dGKQ5hEEubFIvA,9947
+nettracer3d/smart_dilate.py,sha256=nlU5drRgD5xglb3_3tFiCtVEjyqfGgZF56RkVWylRR0,27143
+nettracer3d-0.8.4.dist-info/licenses/LICENSE,sha256=gM207DhJjWrxLuEWXl0Qz5ISbtWDmADfjHp3yC2XISs,888
+nettracer3d-0.8.4.dist-info/METADATA,sha256=3Q1UzbGTRyjogKPQmxPdqEiehxj1aZD9dMVV1uwVNXY,6747
+nettracer3d-0.8.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+nettracer3d-0.8.4.dist-info/entry_points.txt,sha256=Nx1rr_0QhJXDBHAQg2vcqCzLMKBzSHfwy3xwGkueVyc,53
+nettracer3d-0.8.4.dist-info/top_level.txt,sha256=zsYy9rZwirfCEOubolhee4TyzqBAL5gSUeFMzhFTX8c,12
+nettracer3d-0.8.4.dist-info/RECORD,,
@@ -1,24 +0,0 @@
-nettracer3d/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nettracer3d/cellpose_manager.py,sha256=qZpTxSkmPb38Pru8TmjJ88xxcD_wM02EfJB5Mw9Xx4Y,6021
-nettracer3d/community_extractor.py,sha256=08WWUO94ToZFglBU9X1L-T-WJuaIa-8t35-52kN4i6g,28167
-nettracer3d/excelotron.py,sha256=lS5vnpoOGZWp7fdqVpTPqeC-mUKrfwDrWHfx4PQ7Uzg,71384
-nettracer3d/modularity.py,sha256=O9OeKbjD3v6gSFz9K2GzP6LsxlpQaPfeJbM1pyIEigw,21788
-nettracer3d/morphology.py,sha256=jyDjYzrZ4LvI5jOyw8DLsxmo-i5lpqHsejYpW7Tq7Mo,19786
-nettracer3d/neighborhoods.py,sha256=ac_gjN7pUlXZZpMSZnUVErKbKtSlInxX2dHe22oDNJA,34532
-nettracer3d/nettracer.py,sha256=L7FGwgiuzIloFBcQmR7UNYGicJMHgHL-etIgogCrRE0,235084
-nettracer3d/nettracer_gui.py,sha256=QRZEEazgO-8jLjZu29ZDD2MLnC4rObISbZgLFiQxgbE,526421
-nettracer3d/network_analysis.py,sha256=yUEzy4hBDTuZvXwFuJWdIQcxqPW4z67APe4zcjCjDW8,43613
-nettracer3d/network_draw.py,sha256=F7fw6Pcf4qWOhdKwLmhwqWdschbDlHzwCVolQC9imeU,14117
-nettracer3d/node_draw.py,sha256=LoeTFeOcrX6kPquZvCqYnMW-jDd9oqKM27r-rTlKEtY,10274
-nettracer3d/proximity.py,sha256=mRkug_y6fbqq_pOYTkF5uOoiRhvYv2e_QFC92ZTraYE,38110
-nettracer3d/run.py,sha256=xYeaAc8FCx8MuzTGyL3NR3mK7WZzffAYAH23bNRZYO4,127
-nettracer3d/segmenter.py,sha256=VatOSpc41lxhPuYLTTejCxG1CcwP5hwiQ3ZFK9OBavA,60115
-nettracer3d/segmenter_GPU.py,sha256=sFVmz_cYIVOQqnfFV3peK9hzb6IoIV5WDQHH9Lws96I,53915
-nettracer3d/simple_network.py,sha256=dkG4jpc4zzdeuoaQobgGfL3PNo6N8dGKQ5hEEubFIvA,9947
-nettracer3d/smart_dilate.py,sha256=DOEOQq9ig6-AO4MpqAG0CqrGDFqw5_UBeqfSedqHk28,25933
-nettracer3d-0.8.3.dist-info/licenses/LICENSE,sha256=gM207DhJjWrxLuEWXl0Qz5ISbtWDmADfjHp3yC2XISs,888
-nettracer3d-0.8.3.dist-info/METADATA,sha256=yVQGa_obriAzfkWCzXCS0EYtxBRD8UmHL8OWmFIxFJ8,6750
-nettracer3d-0.8.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-nettracer3d-0.8.3.dist-info/entry_points.txt,sha256=Nx1rr_0QhJXDBHAQg2vcqCzLMKBzSHfwy3xwGkueVyc,53
-nettracer3d-0.8.3.dist-info/top_level.txt,sha256=zsYy9rZwirfCEOubolhee4TyzqBAL5gSUeFMzhFTX8c,12
-nettracer3d-0.8.3.dist-info/RECORD,,