nettracer3d 0.6.4__py3-none-any.whl → 0.6.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nettracer3d/segmenter.py CHANGED
@@ -21,7 +21,7 @@ import multiprocessing
 from collections import defaultdict
 
 class InteractiveSegmenter:
-    def __init__(self, image_3d, use_gpu=True):
+    def __init__(self, image_3d, use_gpu=False):
         self.image_3d = image_3d
         self.patterns = []
 
@@ -82,7 +82,13 @@ class InteractiveSegmenter:
         self._last_processed_slice = None
         self.mem_lock = False
 
-    def segment_slice_chunked(self, slice_z, block_size=64):
+        #Adjustable feature map params:
+        self.alphas = [1,2,4,8]
+        self.windows = 10
+        self.dogs = [(1, 2), (2, 4), (4, 8)]
+        self.master_chunk = 49
+
+    def segment_slice_chunked(self, slice_z, block_size = 49):
         """
         A completely standalone method to segment a single z-slice in chunks
         with improved safeguards.
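Note: the four new attributes centralize values that were previously hard-coded in the feature builders below — self.alphas replaces the Gaussian sigmas [0.5, 1.0, 2.0, 4.0], self.windows the local mean/variance window of 5, self.dogs the difference-of-Gaussians pairs [(1, 2), (2, 4)], and self.master_chunk the chunk edge of 32/64. A minimal standalone sketch of the feature bank these parameters drive (illustrative only; feature_bank is a hypothetical helper, not the package API):

    import numpy as np
    from scipy import ndimage

    alphas = [1, 2, 4, 8]            # Gaussian sigmas
    windows = 10                     # local mean/variance window edge
    dogs = [(1, 2), (2, 4), (4, 8)]  # difference-of-Gaussians sigma pairs

    def feature_bank(image_3d):
        # Gaussian blurs at each sigma
        feats = [ndimage.gaussian_filter(image_3d, s) for s in alphas]
        # Difference of Gaussians for each sigma pair
        feats += [ndimage.gaussian_filter(image_3d, s1) - ndimage.gaussian_filter(image_3d, s2)
                  for s1, s2 in dogs]
        # Local mean and variance over a windows^3 neighborhood
        kernel = np.ones((windows,) * 3) / (windows ** 3)
        feats.append(ndimage.convolve(image_3d, kernel, mode='reflect'))
        feats.append(ndimage.convolve((image_3d - image_3d.mean()) ** 2, kernel, mode='reflect'))
        return np.stack(feats, axis=-1)  # one feature vector per voxel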
@@ -280,13 +286,24 @@ class InteractiveSegmenter:
         def compute_gaussian(sigma):
             return ndimage.gaussian_filter(image_3d, sigma)
 
-        for sigma in [0.5, 1.0, 2.0, 4.0]:
+        for sigma in self.alphas:
             future = executor.submit(compute_gaussian, sigma)
             futures.append(('gaussian', sigma, future))
+
+        def compute_dog_local(img, s1, s2):
+            g1 = ndimage.gaussian_filter(img, s1)
+            g2 = ndimage.gaussian_filter(img, s2)
+            return g1 - g2
+
+        # Difference of Gaussians
+        for (s1, s2) in self.dogs:
+
+            future = executor.submit(compute_dog_local, image_3d, s1, s2)
+            futures.append('dog', s1, future)
 
         # Local statistics computation
         def compute_local_mean():
-            window_size = 5
+            window_size = self.windows
             kernel = np.ones((window_size, window_size, window_size)) / (window_size**3)
             return ndimage.convolve(image_3d, kernel, mode='reflect')
 
@@ -294,7 +311,7 @@ class InteractiveSegmenter:
         futures.append(('local_mean', None, future))
 
         def compute_local_variance():
-            window_size = 5
+            window_size = self.windows
             kernel = np.ones((window_size, window_size, window_size)) / (window_size**3)
             mean = np.mean(image_3d)
             return ndimage.convolve((image_3d - mean)**2, kernel, mode='reflect')
@@ -395,8 +412,11 @@ class InteractiveSegmenter:
         features = []
 
         # Add Gaussian features
-        for sigma in [0.5, 1.0, 2.0, 4.0]:
+        for sigma in self.alphas:
             features.append(results[f'gaussian_{sigma}'])
+
+        for sigma in self.dogs:
+            features.append(results[f'dog_{sigma[0]}'])
 
         # Add local statistics
         features.append(results['local_mean'])
@@ -494,13 +514,24 @@ class InteractiveSegmenter:
         def compute_gaussian(sigma):
             return ndimage.gaussian_filter(image_2d, sigma)
 
-        for sigma in [0.5, 1.0, 2.0, 4.0]:
+        for sigma in self.alphas:
             future = executor.submit(compute_gaussian, sigma)
             futures.append(('gaussian', sigma, future))
+
+        # Difference of Gaussians
+        def compute_dog(s1, s2):
+            g1 = ndimage.gaussian_filter(image_2d, s1)
+            g2 = ndimage.gaussian_filter(image_2d, s2)
+            return g1 - g2
+
+        dog_pairs = self.dogs
+        for (s1, s2) in dog_pairs:
+            future = executor.submit(compute_dog, s1, s2)
+            futures.append(('dog', s1, future))
 
         # Local statistics computation
         def compute_local_mean():
-            window_size = 5
+            window_size = self.windows
             kernel = np.ones((window_size, window_size)) / (window_size**2)
             return ndimage.convolve(image_2d, kernel, mode='reflect')
 
@@ -508,7 +539,7 @@ class InteractiveSegmenter:
         futures.append(('local_mean', None, future))
 
         def compute_local_variance():
-            window_size = 5
+            window_size = self.windows
             kernel = np.ones((window_size, window_size)) / (window_size**2)
             mean = np.mean(image_2d)
             return ndimage.convolve((image_2d - mean)**2, kernel, mode='reflect')
@@ -608,8 +639,11 @@ class InteractiveSegmenter:
         features = []
 
         # Add Gaussian features
-        for sigma in [0.5, 1.0, 2.0, 4.0]:
+        for sigma in self.alphas:
             features.append(results[f'gaussian_{sigma}'])
+
+        for sigma in self.dogs:
+            features.append(results[f'dog_{sigma[0]}'])
 
         # Add local statistics
         features.append(results['local_mean'])
@@ -843,7 +877,7 @@ class InteractiveSegmenter:
         def compute_gaussian(sigma):
             return ndimage.gaussian_filter(image_2d, sigma)
 
-        gaussian_sigmas = [0.5, 1.0, 2.0, 4.0]
+        gaussian_sigmas = self.alphas
         for sigma in gaussian_sigmas:
             future = executor.submit(compute_gaussian, sigma)
             futures.append(('gaussian', sigma, future))
@@ -854,7 +888,7 @@ class InteractiveSegmenter:
             g2 = ndimage.gaussian_filter(image_2d, s2)
             return g1 - g2
 
-        dog_pairs = [(1, 2), (2, 4)]
+        dog_pairs = self.dogs
         for (s1, s2) in dog_pairs:
             future = executor.submit(compute_dog, s1, s2)
             futures.append(('dog', (s1, s2), future))
@@ -978,17 +1012,17 @@ class InteractiveSegmenter:
         futures = []
 
         # Gaussian smoothing at different scales
-        for sigma in [0.5, 1.0, 2.0, 4.0]:
+        for sigma in self.alphas:
             future = executor.submit(ndimage.gaussian_filter, image_3d, sigma)
             futures.append(future)
 
+        def compute_dog_local(img, s1, s2):
+            g1 = ndimage.gaussian_filter(img, s1)
+            g2 = ndimage.gaussian_filter(img, s2)
+            return g1 - g2
+
         # Difference of Gaussians
-        for (s1, s2) in [(1, 2), (2, 4)]:
-            # Need to define a local function for this task
-            def compute_dog_local(img, s1, s2):
-                g1 = ndimage.gaussian_filter(img, s1)
-                g2 = ndimage.gaussian_filter(img, s2)
-                return g1 - g2
+        for (s1, s2) in self.dogs:
 
             future = executor.submit(compute_dog_local, image_3d, s1, s2)
             futures.append(future)
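Note: hoisting compute_dog_local out of the loop avoids re-defining the closure on every iteration; each pair in self.dogs is submitted once and collected from its future. In the tagged paths of this file the results are read back as results[f'dog_{sigma[0]}'], i.e. keyed by the first sigma of each pair. A minimal sketch of the submit/collect pattern, assuming only SciPy and a thread pool (not the package's own code):

    from concurrent.futures import ThreadPoolExecutor
    import numpy as np
    from scipy import ndimage

    def dog_features(image_3d, dogs=((1, 2), (2, 4), (4, 8))):
        def compute_dog_local(img, s1, s2):
            return ndimage.gaussian_filter(img, s1) - ndimage.gaussian_filter(img, s2)

        with ThreadPoolExecutor() as executor:
            # Submit one task per sigma pair, then gather in submission order
            futures = [executor.submit(compute_dog_local, image_3d, s1, s2) for s1, s2 in dogs]
            return [f.result() for f in futures]

    maps = dog_features(np.random.rand(8, 32, 32))
    # three arrays, one per (s1, s2) pair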
@@ -1180,9 +1214,10 @@ class InteractiveSegmenter:
             Dictionary with z-values as keys and lists of corresponding [y, x] coordinates as values
         """
         z_dict = defaultdict(list)
-
+
         for z, y, x in coordinates:
             z_dict[z].append((y, x))
+
 
         return dict(z_dict) # Convert back to regular dict
 
@@ -1204,19 +1239,34 @@ class InteractiveSegmenter:
         foreground = set()
         background = set()
 
-        if not self.use_two:
+        if self.previewing or not self.use_two:
             if self.mem_lock:
                 # For mem_lock, we need to extract a subarray and compute features
-
-                # Find min/max bounds of the coordinates to get the smallest containing subarray
-                z_coords = [z for z, y, x in chunk_coords]
-                y_coords = [y for z, y, x in chunk_coords]
-                x_coords = [x for z, y, x in chunk_coords]
-
-                z_min, z_max = min(z_coords), max(z_coords)
-                y_min, y_max = min(y_coords), max(y_coords)
-                x_min, x_max = min(x_coords), max(x_coords)
-
+
+                if self.realtimechunks is None: #Presuming we're segmenting all
+                    z_min, z_max = chunk_coords[0], chunk_coords[1]
+                    y_min, y_max = chunk_coords[2], chunk_coords[3]
+                    x_min, x_max = chunk_coords[4], chunk_coords[5]
+
+                    # Consider moving this to process chunk ??
+                    chunk_coords = np.stack(np.meshgrid(
+                        np.arange(z_min, z_max),
+                        np.arange(y_min, y_max),
+                        np.arange(x_min, x_max),
+                        indexing='ij'
+                    )).reshape(3, -1).T
+
+                    chunk_coords = (list(map(tuple, chunk_coords)))
+                else: #Presumes we're not segmenting all
+                    # Find min/max bounds of the coordinates to get the smallest containing subarray
+                    z_coords = [z for z, y, x in chunk_coords]
+                    y_coords = [y for z, y, x in chunk_coords]
+                    x_coords = [x for z, y, x in chunk_coords]
+
+                    z_min, z_max = min(z_coords), max(z_coords)
+                    y_min, y_max = min(y_coords), max(y_coords)
+                    x_min, x_max = min(x_coords), max(x_coords)
+
 
                 # Extract the subarray
                 subarray = self.image_3d[z_min:z_max+1, y_min:y_max+1, x_min:x_max+1]
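Note: when realtimechunks is None the chunk now arrives as a six-element bounds list rather than a list of coordinates, and process_chunk expands it itself. A small worked example of that expansion (illustrative values; upper bounds are exclusive, matching the np.arange calls above):

    import numpy as np

    chunk_coords = [2, 4, 0, 3, 0, 2]   # [z_min, z_max, y_min, y_max, x_min, x_max]
    z_min, z_max, y_min, y_max, x_min, x_max = chunk_coords

    coords = np.stack(np.meshgrid(
        np.arange(z_min, z_max),
        np.arange(y_min, y_max),
        np.arange(x_min, x_max),
        indexing='ij'
    )).reshape(3, -1).T

    coords = list(map(tuple, coords))
    # 2 * 3 * 2 = 12 voxel coordinates: (2, 0, 0), (2, 0, 1), (2, 1, 0), ..., (3, 2, 1)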
@@ -1254,8 +1304,13 @@ class InteractiveSegmenter:
                     background.add(coord)
 
         else:
-            chunk_by_z = self.organize_by_z(chunk_coords)
-            for z, coords in chunk_by_z.items():
+
+            if self.mem_lock:
+                chunk_coords = self.twodim_coords(chunk_coords[0], chunk_coords[1], chunk_coords[2], chunk_coords[3], chunk_coords[4])
+
+            chunk_coords = self.organize_by_z(chunk_coords)
+
+            for z, coords in chunk_coords.items():
 
                 if self.feature_cache is None:
                     features = self.get_feature_map_slice(z, self.speed, self.cur_gpu)
@@ -1283,6 +1338,63 @@ class InteractiveSegmenter:
 
         return foreground, background
 
+    def twodim_coords(self, y_dim, x_dim, z, chunk_size = None, subrange = None):
+
+        if subrange is None:
+            y_coords, x_coords = np.meshgrid(
+                np.arange(y_dim),
+                np.arange(x_dim),
+                indexing='ij'
+            )
+
+            slice_coords = np.column_stack((
+                np.full(chunk_size, z),
+                y_coords.ravel(),
+                x_coords.ravel()
+            ))
+
+        elif subrange[0] == 'y':
+
+            y_subrange = np.arange(subrange[1], subrange[2])
+
+            # Create meshgrid for this subchunk
+            y_sub, x_sub = np.meshgrid(
+                y_subrange,
+                np.arange(x_dim),
+                indexing='ij'
+            )
+
+            # Create coordinates for this subchunk
+            subchunk_size = len(y_subrange) * x_dim
+            slice_coords = np.column_stack((
+                np.full(subchunk_size, z),
+                y_sub.ravel(),
+                x_sub.ravel()
+            ))
+
+        elif subrange[0] == 'x':
+
+            x_subrange = np.arange(subrange[1], subrange[2])
+
+            # Create meshgrid for this subchunk
+            y_sub, x_sub = np.meshgrid(
+                np.arange(y_dim),
+                x_subrange,
+                indexing='ij'
+            )
+
+            # Create coordinates for this subchunk
+            subchunk_size = y_dim * len(x_subrange)
+            slice_coords = np.column_stack((
+                np.full(subchunk_size, z),
+                y_sub.ravel(),
+                x_sub.ravel()
+            ))
+
+
+
+        return list(map(tuple, slice_coords))
+
 
 
     def segment_volume(self, chunk_size=None, gpu=False):
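Note: the new twodim_coords helper centralizes the slice/subchunk coordinate generation that create_2d_chunks used to inline, returning a flat list of (z, y, x) tuples for either a whole slice or a y/x sub-range. A standalone re-implementation for illustration (twodim_coords_sketch is a hypothetical stand-in, not the package function):

    import numpy as np

    def twodim_coords_sketch(y_dim, x_dim, z, chunk_size=None, subrange=None):
        if subrange is None:
            # Whole slice: chunk_size is expected to equal y_dim * x_dim
            ys, xs = np.meshgrid(np.arange(y_dim), np.arange(x_dim), indexing='ij')
            coords = np.column_stack((np.full(chunk_size, z), ys.ravel(), xs.ravel()))
        else:
            axis, start, stop = subrange
            sub = np.arange(start, stop)
            ys, xs = (np.meshgrid(sub, np.arange(x_dim), indexing='ij') if axis == 'y'
                      else np.meshgrid(np.arange(y_dim), sub, indexing='ij'))
            coords = np.column_stack((np.full(ys.size, z), ys.ravel(), xs.ravel()))
        return list(map(tuple, coords))

    twodim_coords_sketch(3, 2, z=1, chunk_size=6)           # all 6 (z, y, x) tuples of a 3x2 slice
    twodim_coords_sketch(3, 2, z=1, subrange=['y', 1, 3])   # only rows y = 1 and y = 2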
@@ -1293,19 +1405,21 @@ class InteractiveSegmenter:
         self.map_slice = None
 
         if self.mem_lock:
-            chunk_size = 32 #memory efficient chunk
+            chunk_size = self.master_chunk #memory efficient chunk
 
 
         def create_2d_chunks():
             """
             Create chunks by z-slices for 2D processing.
             Each chunk is a complete z-slice with all y,x coordinates,
-            unless the slice exceeds 32768 pixels, in which case it's divided into subchunks.
+            unless the slice exceeds 262144 pixels, in which case it's divided into subchunks.
 
             Returns:
                 List of chunks, where each chunk contains the coordinates for one z-slice or subchunk
             """
-            MAX_CHUNK_SIZE = 32768
+            MAX_CHUNK_SIZE = 262144
+            if not self.mem_lock:
+                MAX_CHUNK_SIZE = 10000000000000000000000000 #unlimited i guess
             chunks = []
 
             for z in range(self.image_3d.shape[0]):
@@ -1315,20 +1429,16 @@ class InteractiveSegmenter:
                 total_pixels = y_dim * x_dim
 
                 # If the slice is small enough, do not subchunk
-                if total_pixels <= MAX_CHUNK_SIZE or not self.mem_lock:
-                    y_coords, x_coords = np.meshgrid(
-                        np.arange(y_dim),
-                        np.arange(x_dim),
-                        indexing='ij'
-                    )
-
-                    slice_coords = np.column_stack((
-                        np.full(total_pixels, z),
-                        y_coords.ravel(),
-                        x_coords.ravel()
-                    ))
-
-                    chunks.append(list(map(tuple, slice_coords)))
+                if total_pixels <= MAX_CHUNK_SIZE:
+
+
+                    if not self.mem_lock:
+                        chunks.append(self.twodim_coords(y_dim, x_dim, z, total_pixels))
+                    else:
+                        chunks.append([y_dim, x_dim, z, total_pixels, None])
+
+
+
                 else:
                     # Determine which dimension to divide (the largest one)
                     largest_dim = 'y' if y_dim >= x_dim else 'x'
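Note: with mem_lock enabled, create_2d_chunks no longer materializes coordinate lists at all; it stores a small descriptor that process_chunk later hands to twodim_coords. Roughly, with illustrative values:

    y_dim, x_dim, z = 512, 512, 7
    total_pixels = y_dim * x_dim            # 262,144 pixels in this slice

    full_slice_descriptor = [y_dim, x_dim, z, total_pixels, None]    # whole slice, expanded later
    subchunk_descriptor   = [y_dim, x_dim, z, None, ['y', 0, 128]]   # rows 0..127 of the slice
    # versus a materialized list of 262,144 (z, y, x) tuples when mem_lock is off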
@@ -1342,56 +1452,31 @@ class InteractiveSegmenter:
                     # Create subchunks by dividing the y-dimension
                     for i in range(0, y_dim, div_size):
                         end_i = min(i + div_size, y_dim)
-                        y_subrange = np.arange(i, end_i)
-
-                        # Create meshgrid for this subchunk
-                        y_sub, x_sub = np.meshgrid(
-                            y_subrange,
-                            np.arange(x_dim),
-                            indexing='ij'
-                        )
-
-                        # Create coordinates for this subchunk
-                        subchunk_size = len(y_subrange) * x_dim
-                        subchunk_coords = np.column_stack((
-                            np.full(subchunk_size, z),
-                            y_sub.ravel(),
-                            x_sub.ravel()
-                        ))
-
-                        chunks.append(list(map(tuple, subchunk_coords)))
+
+                        if not self.mem_lock:
+                            chunks.append(self.twodim_coords(y_dim, x_dim, z, None, ['y', i, end_i]))
+                        else:
+                            chunks.append([y_dim, x_dim, z, None, ['y', i, end_i]])
+
                 else: # largest_dim == 'x'
                     div_size = int(np.ceil(x_dim / num_divisions))
                     # Create subchunks by dividing the x-dimension
                     for i in range(0, x_dim, div_size):
                         end_i = min(i + div_size, x_dim)
-                        x_subrange = np.arange(i, end_i)
-
-                        # Create meshgrid for this subchunk
-                        y_sub, x_sub = np.meshgrid(
-                            np.arange(y_dim),
-                            x_subrange,
-                            indexing='ij'
-                        )
-
-                        # Create coordinates for this subchunk
-                        subchunk_size = y_dim * len(x_subrange)
-                        subchunk_coords = np.column_stack((
-                            np.full(subchunk_size, z),
-                            y_sub.ravel(),
-                            x_sub.ravel()
-                        ))
-
-                        chunks.append(list(map(tuple, subchunk_coords)))
+
+                        if not self.mem_lock:
+                            chunks.append(self.twodim_coords(y_dim, x_dim, z, None, ['x', i, end_i]))
+                        else:
+                            chunks.append([y_dim, x_dim, z, None, ['x', i, end_i]])
 
             return chunks
 
-        try:
-            from cuml.ensemble import RandomForestClassifier as cuRandomForestClassifier
-        except:
-            print("Cannot find cuML, using CPU to segment instead...")
-            gpu = False
-
+        #try:
+            #from cuml.ensemble import RandomForestClassifier as cuRandomForestClassifier
+        #except:
+            #print("Cannot find cuML, using CPU to segment instead...")
+            #gpu = False
+
         if self.feature_cache is None and not self.mem_lock and not self.use_two:
             with self.lock:
                 if self.feature_cache is None:
@@ -1437,15 +1522,24 @@ class InteractiveSegmenter:
                     y_end = min(y_start + chunk_size, self.image_3d.shape[1])
                     x_end = min(x_start + chunk_size, self.image_3d.shape[2])
 
-                    # Create coordinates for this chunk efficiently
-                    coords = np.stack(np.meshgrid(
-                        np.arange(z_start, z_end),
-                        np.arange(y_start, y_end),
-                        np.arange(x_start, x_end),
-                        indexing='ij'
-                    )).reshape(3, -1).T
+                    if self.mem_lock:
+                        # Create coordinates for this chunk efficiently
+                        coords = [z_start, z_end, y_start, y_end, x_start, x_end]
+                        chunks.append(coords)
+
+                    else:
+                        # Consider moving this to process chunk ??
+                        coords = np.stack(np.meshgrid(
+                            np.arange(z_start, z_end),
+                            np.arange(y_start, y_end),
+                            np.arange(x_start, x_end),
+                            indexing='ij'
+                        )).reshape(3, -1).T
 
-                    chunks.append(list(map(tuple, coords)))
+                        chunks.append(list(map(tuple, coords)))
+
+
+
         else:
             chunks = create_2d_chunks()
             self.feature_cache = None #Decided this should not maintain training data for segmenting 2D
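Note: the 3D chunking path applies the same deferral — under mem_lock each chunk is just its start/end bounds, and the meshgrid expansion happens later in process_chunk (see the process_chunk hunk above). A quick size comparison for one master_chunk-sized block (illustrative arithmetic):

    chunk = [0, 49, 0, 49, 0, 49]     # [z_start, z_end, y_start, y_end, x_start, x_end]
    n_voxels = 49 * 49 * 49           # 117,649 voxels covered by this one chunk
    # Six integers per chunk instead of 117,649 (z, y, x) tuples held in memory up front.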
@@ -1476,7 +1570,10 @@ class InteractiveSegmenter:
                 fore, back = self.process_chunk(chunk)
                 foreground_coords.update(fore)
                 background_coords.update(back)
-                chunk[i] = None #Help garbage collection
+                try:
+                    chunk[i] = None #Help garbage collection
+                except:
+                    pass
                 print(f"Processed {i}/{len(chunks)} chunks")
 
         return foreground_coords, background_coords
@@ -1518,10 +1615,7 @@ class InteractiveSegmenter:
             self.prev_z = z
 
 
-    def get_realtime_chunks(self, chunk_size = 64):
-        print("Computing some overhead...")
-
-
+    def get_realtime_chunks(self, chunk_size = 49):
 
         # Determine if we need to chunk XY planes
         small_dims = (self.image_3d.shape[1] <= chunk_size and
@@ -1544,16 +1638,9 @@ class InteractiveSegmenter:
         # Create chunks for each Z plane
         for z in range(self.image_3d.shape[0]):
             if small_dims:
-                # One chunk per Z
-                coords = np.stack(np.meshgrid(
-                    [z],
-                    np.arange(self.image_3d.shape[1]),
-                    np.arange(self.image_3d.shape[2]),
-                    indexing='ij'
-                )).reshape(3, -1).T
 
                 chunk_dict[(z, 0, 0)] = {
-                    'coords': list(map(tuple, coords)),
+                    'coords': [0, self.image_3d.shape[1], 0, self.image_3d.shape[2]],
                     'processed': False,
                     'z': z
                 }
@@ -1566,15 +1653,8 @@ class InteractiveSegmenter:
                         y_end = min(y_start + chunk_size_xy, self.image_3d.shape[1])
                         x_end = min(x_start + chunk_size_xy, self.image_3d.shape[2])
 
-                        coords = np.stack(np.meshgrid(
-                            [z],
-                            np.arange(y_start, y_end),
-                            np.arange(x_start, x_end),
-                            indexing='ij'
-                        )).reshape(3, -1).T
-
                         chunk_dict[(z, y_start, x_start)] = {
-                            'coords': list(map(tuple, coords)),
+                            'coords': [y_start, y_end, x_start, x_end],
                             'processed': False,
                             'z': z
                         }
@@ -1606,15 +1686,15 @@ class InteractiveSegmenter:
         def get_nearest_unprocessed_chunk(self):
             """Get nearest unprocessed chunk prioritizing current Z"""
             curr_z = self.current_z if self.current_z is not None else self.image_3d.shape[0] // 2
-            curr_y = self.current_x if self.current_x is not None else self.image_3d.shape[1] // 2
-            curr_x = self.current_y if self.current_y is not None else self.image_3d.shape[2] // 2
+            curr_y = self.current_y if self.current_y is not None else self.image_3d.shape[1] // 2
+            curr_x = self.current_x if self.current_x is not None else self.image_3d.shape[2] // 2
 
             # First try to find chunks at current Z
             current_z_chunks = [(pos, info) for pos, info in chunk_dict.items()
-                                if info['z'] == curr_z and not info['processed']]
+                                if pos[0] == curr_z and not info['processed']]
 
             if current_z_chunks:
-                # Find nearest chunk in current Z plane
+                # Find nearest chunk in current Z plane using the chunk positions from the key
                 nearest = min(current_z_chunks,
                               key=lambda x: ((x[0][1] - curr_y) ** 2 +
                                              (x[0][2] - curr_x) ** 2))
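Note: since chunk_dict keys are (z, y_start, x_start) tuples and the 'coords' entry now stores only bounds, the nearest-chunk search matches on pos[0] and measures distance with the y/x offsets stored in the key. A tiny worked example (illustrative dictionary contents):

    chunk_dict = {
        (0, 0, 0):  {'coords': [0, 49, 0, 49],  'processed': False, 'z': 0},
        (0, 0, 49): {'coords': [0, 49, 49, 98], 'processed': True,  'z': 0},
        (1, 49, 0): {'coords': [49, 98, 0, 49], 'processed': False, 'z': 1},
    }
    curr_z, curr_y, curr_x = 0, 10, 60

    candidates = [(pos, info) for pos, info in chunk_dict.items()
                  if pos[0] == curr_z and not info['processed']]
    nearest = min(candidates, key=lambda x: (x[0][1] - curr_y) ** 2 + (x[0][2] - curr_x) ** 2)
    # nearest[0] == (0, 0, 0): the only unprocessed chunk on the current z-slice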
@@ -1631,7 +1711,7 @@ class InteractiveSegmenter:
                 target_z = available_z[0][0]
                 # Find nearest chunk in target Z plane
                 z_chunks = [(pos, info) for pos, info in chunk_dict.items()
-                            if info['z'] == target_z and not info['processed']]
+                            if pos[0] == target_z and not info['processed']]
                 nearest = min(z_chunks,
                               key=lambda x: ((x[0][1] - curr_y) ** 2 +
                                              (x[0][2] - curr_x) ** 2))
@@ -1640,45 +1720,38 @@ class InteractiveSegmenter:
             return None
 
 
-        with ThreadPoolExecutor() as executor:
-            futures = {}
-            import multiprocessing
-            total_cores = multiprocessing.cpu_count()
-            #available_workers = max(1, min(4, total_cores // 2)) # Use half cores, max of 4
-            available_workers = 1
+        while True:
+            # Find nearest unprocessed chunk using class attributes
+            chunk_idx = get_nearest_unprocessed_chunk(self)
+            if chunk_idx is None:
+                break
+
+            # Process the chunk directly
+            chunk = chunk_dict[chunk_idx]
+            chunk['processed'] = True
+            coords = chunk['coords']
+
+            coords = np.stack(np.meshgrid(
+                [chunk['z']],
+                np.arange(coords[0], coords[1]),
+                np.arange(coords[2], coords[3]),
+                indexing='ij'
+            )).reshape(3, -1).T
 
-            while True:
-                # Find nearest unprocessed chunk using class attributes
-                chunk_idx = get_nearest_unprocessed_chunk(self) # Pass self
-                if chunk_idx is None:
-                    break
-
-                while (len(futures) < available_workers and
-                       (chunk_idx := get_nearest_unprocessed_chunk(self))): # Pass self
-                    chunk = chunk_dict[chunk_idx]
-                    if gpu:
-                        try:
-                            futures = [executor.submit(self.process_chunk_GPU, chunk) for chunk in chunks]
-                        except:
-                            futures = [executor.submit(self.process_chunk, chunk) for chunk in chunks]
-                    else:
-                        future = executor.submit(self.process_chunk, chunk['coords'])
+            coords = list(map(tuple, coords))
 
-                    futures[future] = chunk_idx
-                    chunk['processed'] = True
-
-                # Check completed futures
-                done, _ = concurrent.futures.wait(
-                    futures.keys(),
-                    timeout=0.1,
-                    return_when=concurrent.futures.FIRST_COMPLETED
-                )
-
-                # Process completed chunks
-                for future in done:
-                    fore, back = future.result()
-                    del futures[future]
-                    yield fore, back
+
+            # Process the chunk directly based on whether GPU is available
+            if gpu:
+                try:
+                    fore, back = self.process_chunk_GPU(coords)
+                except:
+                    fore, back = self.process_chunk(coords)
+            else:
+                fore, back = self.process_chunk(coords)
+
+            # Yield the results
+            yield fore, back
 
 
     def cleanup(self):
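Note: the realtime path drops the ThreadPoolExecutor bookkeeping entirely — chunks are picked nearest-first, expanded from their stored bounds, processed one at a time, and yielded as they finish. A condensed standalone sketch of that loop (hypothetical function name and simplified nearest-chunk rule, not the package API):

    import numpy as np

    def realtime_chunks(chunk_dict, process_chunk, current_z=0):
        while True:
            pending = [(pos, info) for pos, info in chunk_dict.items() if not info['processed']]
            if not pending:
                break
            # Simplified "nearest" rule: smallest z-distance from the viewer's current slice
            pos, info = min(pending, key=lambda item: abs(item[0][0] - current_z))
            info['processed'] = True

            y0, y1, x0, x1 = info['coords']
            coords = np.stack(np.meshgrid(
                [info['z']],
                np.arange(y0, y1),
                np.arange(x0, x1),
                indexing='ij'
            )).reshape(3, -1).T

            yield process_chunk(list(map(tuple, coords)))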
@@ -1700,6 +1773,8 @@ class InteractiveSegmenter:
         self.realtimechunks = None #dump ram
         self.feature_cache = None
 
+        if not use_two:
+            self.use_two = False
 
         self.mem_lock = mem_lock
 
@@ -1791,7 +1866,7 @@ class InteractiveSegmenter:
 
         elif mem_lock: #Forces ram efficiency
 
-            box_size = 32
+            box_size = self.master_chunk
 
             # Memory-efficient approach: compute features only for necessary subarrays
             foreground_features = []