nettracer3d 0.7.4__py3-none-any.whl → 0.7.5__py3-none-any.whl

This diff shows the content changes between two publicly released versions of the package, as they appear in their respective public registries. The information is provided for informational purposes only.
@@ -4,7 +4,6 @@ import json
 import tifffile
 import numpy as np
 from networkx.algorithms import community
-from community import community_louvain
 from scipy.ndimage import zoom
 from scipy import ndimage
 from . import node_draw
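The dropped import refers to the third-party python-louvain package, which exposes Louvain community detection as community_louvain.best_partition. For readers who relied on that behaviour, networkx (2.8 and later) ships its own Louvain implementation; the helper below is an illustrative substitution sketch, not code from nettracer3d.

import networkx as nx

def louvain_partition(G, seed=0):
    # Illustrative helper, not part of nettracer3d.
    # nx.community.louvain_communities returns a list of node sets;
    # flatten it into a {node: community_id} dict, 1-indexed to match
    # the convention used by the removed functions shown below.
    communities = nx.community.louvain_communities(G, weight="weight", seed=seed)
    return {node: i + 1 for i, comm in enumerate(communities) for node in comm}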
@@ -239,183 +238,6 @@ def weighted_network(excel_file_path):
 
     return G, edge_weights
 
-def community_partition_simple(nodes, network, centroids = None, down_factor = None, color_code = False, directory = None):
-
-    G = network
-
-    communities = list(nx.community.label_propagation_communities(G))
-    partition = {}
-    for i, community in enumerate(communities):
-        for node in community:
-            partition[node] = i + 1
-    print(partition)
-
-
-    print("Generating excel notebook containing community of each node...")
-    if type(nodes) == str:
-        nodes = tifffile.imread(nodes)
-
-    if len(np.unique(nodes)) < 3:
-
-        structure_3d = np.ones((3, 3, 3), dtype=int)
-        nodes, num_nodes = ndimage.label(nodes, structure=structure_3d)
-
-    # Convert dictionary to DataFrame with keys as index and values as a column
-    df = pd.DataFrame.from_dict(partition, orient='index', columns=['CommunityID'])
-
-    # Rename the index to 'Node ID'
-    df.index.name = 'Node ID'
-
-    if directory is None:
-
-        # Save DataFrame to Excel file
-        df.to_excel('communities.xlsx', engine='openpyxl')
-        print("Community info saved to communities.xls")
-    else:
-        df.to_excel(f'{directory}/communities.xlsx', engine='openpyxl')
-        print(f"Community info saved to {directory}/communities.xls")
-
-
-    print("Drawing overlay containing community labels for each node...")
-
-    if centroids is None:
-        centroids = _find_centroids(nodes, down_factor = down_factor)
-
-    labels = node_draw.degree_draw(partition, centroids, nodes)
-
-    if directory is None:
-        tifffile.imwrite("community_labels.tif", labels)
-        print("Community labels saved to community_labels.tif")
-    else:
-        tifffile.imwrite(f"{directory}/community_labels.tif", labels)
-        print(f"Community labels saved to {directory}/community_labels.tif")
-
-    print("Drawing overlay containing grayscale community labels for each node...")
-
-    masks = node_draw.degree_infect(partition, nodes)
-
-    if color_code:
-        print("And drawing color coded community labels...")
-        colored_masks = _color_code(masks)
-
-        if directory is None:
-
-            tifffile.imwrite("community_labels_colorcoded.tif", colored_masks)
-            print("Color coded communities saved to community_labels_colorcoded.tif")
-
-        else:
-            tifffile.imwrite(f"{directory}/community_labels_colorcoded.tif", colored_masks)
-            print(f"Color coded communities saved to {directory}/community_labels_colorcoded.tif")
-
-
-    if directory is None:
-
-        tifffile.imwrite("community_labels_grayscale.tif", masks)
-        print("Grayscale labeled communities saved to community_labels_grayscale.tif")
-
-    else:
-        tifffile.imwrite(f"{directory}/community_labels_grayscale.tif", masks)
-        print(f"Grayscale labeled communities saved to {directory}/community_labels_grayscale.tif")
-
-    return partition
-
-
-def community_partition(nodes, network, centroids = None, down_factor = None, color_code = False, directory = None):
-
-    G, edge_weights = weighted_network(network)
-
-    G = nx.Graph()
-
-    # Find the maximum and minimum edge weights
-    max_weight = max(weight for edge, weight in edge_weights.items())
-    min_weight = min(weight for edge, weight in edge_weights.items())
-
-    if max_weight > 1:
-        # Normalize edge weights to the range [0.1, 1.0]
-        normalized_weights = {edge: 0.1 + 0.9 * ((weight - min_weight) / (max_weight - min_weight)) for edge, weight in edge_weights.items()}
-    else:
-        normalized_weights = {edge: 0.1 for edge, weight in edge_weights.items()}
-
-    # Add edges to the graph with normalized weights
-    for edge, normalized_weight in normalized_weights.items():
-        G.add_edge(edge[0], edge[1], weight=normalized_weight)
-
-    # Perform Louvain community detection
-    partition = community_louvain.best_partition(G)
-
-    for key in partition.keys():
-        partition[key] = partition[key] + 1
-
-
-    print("Generating excel notebook containing community of each node...")
-    if type(nodes) == str:
-        nodes = tifffile.imread(nodes)
-
-    if len(np.unique(nodes)) < 3:
-
-        structure_3d = np.ones((3, 3, 3), dtype=int)
-        nodes, num_nodes = ndimage.label(nodes, structure=structure_3d)
-
-    # Convert dictionary to DataFrame with keys as index and values as a column
-    df = pd.DataFrame.from_dict(partition, orient='index', columns=['CommunityID'])
-
-    # Rename the index to 'Node ID'
-    df.index.name = 'Node ID'
-
-    if directory is None:
-
-        # Save DataFrame to Excel file
-        df.to_excel('communities.xlsx', engine='openpyxl')
-        print("Community info saved to communities.xls")
-    else:
-        df.to_excel(f'{directory}/communities.xlsx', engine='openpyxl')
-        print(f"Community info saved to {directory}/communities.xls")
-
-
-    print("Drawing overlay containing community labels for each node...")
-
-    if centroids is None:
-        centroids = _find_centroids(nodes, down_factor = down_factor)
-
-    labels = node_draw.degree_draw(partition, centroids, nodes)
-
-    if directory is None:
-        tifffile.imwrite("community_labels.tif", labels)
-        print("Community labels saved to community_labels.tif")
-    else:
-        tifffile.imwrite(f"{directory}/community_labels.tif", labels)
-        print(f"Community labels saved to {directory}/community_labels.tif")
-
-    print("Drawing overlay containing grayscale community labels for each node...")
-
-    masks = node_draw.degree_infect(partition, nodes)
-
-    if color_code:
-        print("And drawing color coded community labels...")
-        colored_masks = _color_code(masks)
-
-        if directory is None:
-
-            tifffile.imwrite("community_labels_colorcoded.tif", colored_masks)
-            print("Color coded communities saved to community_labels_colorcoded.tif")
-
-        else:
-            tifffile.imwrite(f"{directory}/community_labels_colorcoded.tif", colored_masks)
-            print(f"Color coded communities saved to {directory}/community_labels_colorcoded.tif")
-
-
-    if directory is None:
-
-        tifffile.imwrite("community_labels_grayscale.tif", masks)
-        print("Grayscale labeled communities saved to community_labels_grayscale.tif")
-
-    else:
-        tifffile.imwrite(f"{directory}/community_labels_grayscale.tif", masks)
-        print(f"Grayscale labeled communities saved to {directory}/community_labels_grayscale.tif")
-
-    return partition
-
-
 
 def _color_code(grayscale_image):
     """Color code a grayscale array. Currently expects linearly ascending grayscale labels, will crash if there are gaps. (Main use case is grayscale anyway)"""
nettracer3d/segmenter.py CHANGED
@@ -1188,24 +1188,12 @@ class InteractiveSegmenter:
 
         if self.current_speed != speed:
             self.feature_cache = None
-            if use_gpu:
-                try:
-                    self.model = cuRandomForestClassifier(
-                        n_estimators=100,
-                        max_depth=None
-                    )
-                except:
-                    self.model = RandomForestClassifier(
-                        n_estimators=100,
-                        n_jobs=-1,
-                        max_depth=None
-                    )
-            else:
-                self.model = RandomForestClassifier(
-                    n_estimators=100,
-                    n_jobs=-1,
-                    max_depth=None
-                )
+
+            self.model = RandomForestClassifier(
+                n_estimators=100,
+                n_jobs=-1,
+                max_depth=None
+            )
 
 
         if use_two:
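This hunk drops the optional cuML (GPU) random forest and always builds the scikit-learn classifier on all CPU cores. The removed behaviour amounted to "try the GPU forest, fall back to CPU"; a minimal standalone sketch of that pattern follows (it assumes cuML is installed for the GPU path and is not nettracer3d code).

from sklearn.ensemble import RandomForestClassifier

def make_classifier(use_gpu=False, n_estimators=100):
    # Illustrative sketch, not part of nettracer3d.
    if use_gpu:
        try:
            # cuML mirrors the scikit-learn estimator API on the GPU.
            from cuml.ensemble import RandomForestClassifier as cuRF
            return cuRF(n_estimators=n_estimators)
        except ImportError:
            pass  # No cuML available: fall through to the CPU forest.
    return RandomForestClassifier(n_estimators=n_estimators, n_jobs=-1, max_depth=None)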
@@ -1272,7 +1260,7 @@ class InteractiveSegmenter:
                     background_features.append(feature_vector)
 
 
-        elif mem_lock: #Forces ram efficiency
+        else: #Forces ram efficiency
 
             box_size = self.master_chunk
 
@@ -1348,32 +1336,6 @@ class InteractiveSegmenter:
                 for local_z, local_y, local_x in local_back_coords:
                     feature = subarray_features[local_z, local_y, local_x]
                     background_features.append(feature)
-
-        else:
-
-            self.two_slices = []
-
-            if self.use_two: #Clarifies if we need to redo feature cache for 3D
-
-                self.feature_cache = None
-                self.use_two = False
-
-            if self.feature_cache is None:
-                with self.lock:
-                    if self.feature_cache is None and speed:
-                        if use_gpu:
-                            self.feature_cache = self.compute_feature_maps()
-                        else:
-                            self.feature_cache = self.compute_feature_maps_cpu()
-
-                    elif self.feature_cache is None and not speed:
-                        if use_gpu:
-
-                            self.feature_cache = self.compute_deep_feature_maps()
-                        else:
-                            self.feature_cache = self.compute_deep_feature_maps_cpu()
-
-
         try:
             # Get foreground coordinates and features
             z_fore, y_fore, x_fore = np.where(foreground_array == 1)
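The deleted branch lazily built a whole-volume feature cache using double-checked locking: test the cache, acquire the lock, then test again so that only one thread computes it. That general pattern, independent of nettracer3d, looks like the sketch below (the class name and compute_features callable are placeholders).

import threading

class LazyFeatureCache:
    # Illustrative sketch of double-checked locking, not nettracer3d code.
    def __init__(self, compute_features):
        self._compute = compute_features  # placeholder callable
        self._cache = None
        self._lock = threading.Lock()

    def get(self):
        # First check without the lock for the common, already-built case.
        if self._cache is None:
            with self._lock:
                # Re-check inside the lock: another thread may have
                # built the cache while we were waiting.
                if self._cache is None:
                    self._cache = self._compute()
        return self._cache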
@@ -1485,20 +1447,11 @@ class InteractiveSegmenter:
         if self._currently_segmenting is not None:
             return
 
-        #with self.lock <- cant remember why this was here
         if speed:
-
-            if self.mem_lock:
-                output = self.compute_feature_maps_cpu_2d_parallel(z = z)
-            else:
-                output = self.compute_feature_maps_cpu_2d(z = z)
+            output = self.compute_feature_maps_cpu_2d_parallel(z = z)
 
         elif not speed:
-
-            if self.mem_lock:
-                output = self.compute_deep_feature_maps_cpu_2d_parallel(z = z)
-            else:
-                output = self.compute_deep_feature_maps_cpu_2d(z = z)
+            output = self.compute_deep_feature_maps_cpu_2d_parallel(z = z)
 
         return output
 