copick-utils 0.6.1__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67) hide show
  1. copick_utils/__init__.py +1 -1
  2. copick_utils/cli/__init__.py +33 -0
  3. copick_utils/cli/clipmesh.py +161 -0
  4. copick_utils/cli/clippicks.py +154 -0
  5. copick_utils/cli/clipseg.py +163 -0
  6. copick_utils/cli/conversion_commands.py +32 -0
  7. copick_utils/cli/enclosed.py +191 -0
  8. copick_utils/cli/filter_components.py +166 -0
  9. copick_utils/cli/fit_spline.py +191 -0
  10. copick_utils/cli/hull.py +138 -0
  11. copick_utils/cli/input_output_selection.py +76 -0
  12. copick_utils/cli/logical_commands.py +29 -0
  13. copick_utils/cli/mesh2picks.py +170 -0
  14. copick_utils/cli/mesh2seg.py +167 -0
  15. copick_utils/cli/meshop.py +262 -0
  16. copick_utils/cli/picks2ellipsoid.py +171 -0
  17. copick_utils/cli/picks2mesh.py +181 -0
  18. copick_utils/cli/picks2plane.py +156 -0
  19. copick_utils/cli/picks2seg.py +134 -0
  20. copick_utils/cli/picks2sphere.py +170 -0
  21. copick_utils/cli/picks2surface.py +164 -0
  22. copick_utils/cli/picksin.py +146 -0
  23. copick_utils/cli/picksout.py +148 -0
  24. copick_utils/cli/processing_commands.py +18 -0
  25. copick_utils/cli/seg2mesh.py +135 -0
  26. copick_utils/cli/seg2picks.py +128 -0
  27. copick_utils/cli/segop.py +248 -0
  28. copick_utils/cli/separate_components.py +155 -0
  29. copick_utils/cli/skeletonize.py +164 -0
  30. copick_utils/cli/util.py +580 -0
  31. copick_utils/cli/validbox.py +155 -0
  32. copick_utils/converters/__init__.py +35 -0
  33. copick_utils/converters/converter_common.py +543 -0
  34. copick_utils/converters/ellipsoid_from_picks.py +335 -0
  35. copick_utils/converters/lazy_converter.py +576 -0
  36. copick_utils/converters/mesh_from_picks.py +209 -0
  37. copick_utils/converters/mesh_from_segmentation.py +119 -0
  38. copick_utils/converters/picks_from_mesh.py +542 -0
  39. copick_utils/converters/picks_from_segmentation.py +168 -0
  40. copick_utils/converters/plane_from_picks.py +251 -0
  41. copick_utils/converters/segmentation_from_mesh.py +291 -0
  42. copick_utils/{segmentation → converters}/segmentation_from_picks.py +123 -13
  43. copick_utils/converters/sphere_from_picks.py +306 -0
  44. copick_utils/converters/surface_from_picks.py +337 -0
  45. copick_utils/logical/__init__.py +43 -0
  46. copick_utils/logical/distance_operations.py +604 -0
  47. copick_utils/logical/enclosed_operations.py +222 -0
  48. copick_utils/logical/mesh_operations.py +443 -0
  49. copick_utils/logical/point_operations.py +303 -0
  50. copick_utils/logical/segmentation_operations.py +399 -0
  51. copick_utils/process/__init__.py +47 -0
  52. copick_utils/process/connected_components.py +360 -0
  53. copick_utils/process/filter_components.py +306 -0
  54. copick_utils/process/hull.py +106 -0
  55. copick_utils/process/skeletonize.py +326 -0
  56. copick_utils/process/spline_fitting.py +648 -0
  57. copick_utils/process/validbox.py +333 -0
  58. copick_utils/util/__init__.py +6 -0
  59. copick_utils/util/config_models.py +614 -0
  60. {copick_utils-0.6.1.dist-info → copick_utils-1.0.0.dist-info}/METADATA +15 -2
  61. copick_utils-1.0.0.dist-info/RECORD +71 -0
  62. copick_utils-1.0.0.dist-info/entry_points.txt +29 -0
  63. copick_utils/segmentation/picks_from_segmentation.py +0 -81
  64. copick_utils-0.6.1.dist-info/RECORD +0 -14
  65. /copick_utils/{segmentation → io}/__init__.py +0 -0
  66. {copick_utils-0.6.1.dist-info → copick_utils-1.0.0.dist-info}/WHEEL +0 -0
  67. {copick_utils-0.6.1.dist-info → copick_utils-1.0.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,542 @@
1
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple
2
+
3
+ import numpy as np
4
+ import trimesh as tm
5
+ from copick.util.log import get_logger
6
+ from scipy.stats.qmc import PoissonDisk
7
+
8
+ from copick_utils.converters.lazy_converter import create_lazy_batch_converter
9
+
10
+ if TYPE_CHECKING:
11
+ from copick.models import CopickMesh, CopickPicks, CopickRoot, CopickRun
12
+
13
+ logger = get_logger(__name__)
14
+
15
+
16
def ensure_mesh(trimesh_object):
    """
    Normalize a trimesh input to a single Trimesh instance.

    Args:
        trimesh_object: Trimesh or Scene object

    Returns:
        Trimesh object, or None when a Scene contains no geometry

    Raises:
        ValueError: If the input is neither a Trimesh nor a Scene.
    """
    if isinstance(trimesh_object, tm.Trimesh):
        return trimesh_object

    if isinstance(trimesh_object, tm.Scene):
        geometries = list(trimesh_object.geometry.values())
        # An empty scene carries no mesh data at all
        return tm.util.concatenate(geometries) if geometries else None

    raise ValueError("Input must be a Trimesh or Scene object")
35
+
36
+
37
def poisson_disk_in_out(
    n_in: int,
    n_out: int,
    mesh: tm.Trimesh,
    max_dim: Sequence[float],
    min_dist: float,
    edge_dist: float,
    input_points: np.ndarray,
    seed: int = 1234,
) -> Tuple[np.ndarray, np.ndarray]:
    """
    Generate Poisson disk sampled points inside and outside the mesh.

    Args:
        n_in: Number of points to sample inside the mesh
        n_out: Number of points to sample outside the mesh
        mesh: Trimesh object
        max_dim: Maximum dimensions of the volume (physical units)
        min_dist: Minimum distance between points (physical units)
        edge_dist: Distance from volume edges (physical units)
        input_points: Existing points to avoid (may be empty)
        seed: Random seed

    Returns:
        Tuple of (points_in, points_out) arrays
    """
    max_dim = np.asarray(max_dim, dtype=float)
    max_max = np.max(max_dim)

    # PoissonDisk samples the unit hypercube, so its radius must be normalized.
    # Keep the physical threshold separate for the proximity rejection below.
    normalized_min_dist = min_dist / max_max

    engine = PoissonDisk(d=3, radius=normalized_min_dist, seed=seed)

    # Fill space and rescale from the unit cube to physical coordinates
    points = engine.fill_space() * max_max

    # Reject points outside the volume (respecting the edge margin)
    lb = np.full(3, edge_dist)
    ub = max_dim - edge_dist
    points = points[np.all(np.logical_and(points > lb, points < ub), axis=1), :]

    # Reject points that are too close to the input points.
    # BUG FIX: the original reassigned min_dist to the unit-cube-normalized
    # radius and then compared *physical* distances against it, so the
    # rejection threshold was ~max_max times too small. Compare physical
    # distances against the physical min_dist instead.
    for pt in input_points:
        dist = np.linalg.norm(points - pt, axis=1)
        points = points[dist > min_dist]

    # Check if points are inside/outside the mesh
    mask = mesh.contains(points)
    points_in = points[mask, :]
    points_out = points[np.logical_not(mask), :]

    # Shuffle so the truncation below yields an unbiased subsample
    np.random.default_rng(seed).shuffle(points_in)
    np.random.default_rng(seed).shuffle(points_out)

    # Limit number of points to n_in and n_out
    if n_in > points_in.shape[0]:
        logger.warning(f"Not enough points inside the mesh. Requested {n_in}, found {points_in.shape[0]}")
        n_in = points_in.shape[0]
    final_points_in = points_in[:n_in, :]

    if n_out > points_out.shape[0]:
        logger.warning(f"Not enough points outside the mesh. Requested {n_out}, found {points_out.shape[0]}")
        n_out = points_out.shape[0]
    final_points_out = points_out[:n_out, :]

    return final_points_in, final_points_out
104
+
105
+
106
def generate_random_orientations(n_points: int, seed: Optional[int] = None) -> np.ndarray:
    """
    Generate uniformly random orientations as 4x4 transformation matrices.

    Uses Shoemake's method: quaternions drawn uniformly from the unit
    3-sphere, converted to rotation matrices. A local Generator is used so
    the global NumPy RNG state is not mutated (the original called
    ``np.random.seed``, a process-wide side effect).

    Args:
        n_points: Number of transformation matrices to generate
        seed: Random seed for reproducible results

    Returns:
        Array of shape (n_points, 4, 4) containing homogeneous transforms
        with a pure rotation in the upper-left 3x3 block
    """
    rng = np.random.default_rng(seed)

    # Shoemake's uniform quaternion sampling (vectorized over all points)
    u1, u2, u3 = rng.random((3, n_points))
    qx = np.sqrt(1 - u1) * np.sin(2 * np.pi * u2)
    qy = np.sqrt(1 - u1) * np.cos(2 * np.pi * u2)
    qz = np.sqrt(u1) * np.sin(2 * np.pi * u3)
    qw = np.sqrt(u1) * np.cos(2 * np.pi * u3)

    transforms = np.zeros((n_points, 4, 4))
    transforms[:, 3, 3] = 1.0

    # Quaternion -> rotation matrix, filled column-wise for all points at once
    transforms[:, 0, 0] = 1 - 2 * (qy**2 + qz**2)
    transforms[:, 0, 1] = 2 * (qx * qy - qz * qw)
    transforms[:, 0, 2] = 2 * (qx * qz + qy * qw)
    transforms[:, 1, 0] = 2 * (qx * qy + qz * qw)
    transforms[:, 1, 1] = 1 - 2 * (qx**2 + qz**2)
    transforms[:, 1, 2] = 2 * (qy * qz - qx * qw)
    transforms[:, 2, 0] = 2 * (qx * qz - qy * qw)
    transforms[:, 2, 1] = 2 * (qy * qz + qx * qw)
    transforms[:, 2, 2] = 1 - 2 * (qx**2 + qy**2)

    return transforms
149
+
150
+
151
def picks_from_mesh(
    mesh: tm.Trimesh,
    sampling_type: str,
    n_points: int,
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    voxel_spacing: float,
    tomo_type: str = "wbp",
    min_dist: Optional[float] = None,
    edge_dist: float = 32.0,
    include_normals: bool = False,
    random_orientations: bool = False,
    seed: Optional[int] = None,
) -> Optional["CopickPicks"]:
    """
    Sample points from a mesh using different strategies.

    Args:
        mesh: Trimesh object to sample from
        sampling_type: Type of sampling ('inside', 'surface', 'outside', 'vertices')
        n_points: Number of points to sample (ignored for 'vertices')
        run: Copick run object
        object_name: Name of the object for the picks
        session_id: Session ID for the picks
        user_id: User ID for the picks
        voxel_spacing: Voxel spacing for coordinate scaling
        tomo_type: Tomogram type for getting volume dimensions
        min_dist: Minimum distance between points (if None, uses voxel_spacing * 2)
        edge_dist: Distance from volume edges in voxels
        include_normals: Include surface normals as orientations (surface sampling only)
        random_orientations: Generate random orientations for points
        seed: Random seed for reproducible results

    Returns:
        CopickPicks object or None if sampling failed

    Raises:
        ValueError: If sampling_type is not one of the supported values.
    """
    # Containment queries are only well-defined for closed surfaces
    if not mesh.is_watertight and sampling_type in ["inside", "outside"]:
        # FIX: use the module logger instead of print, consistent with the rest of the file
        logger.warning(f"Mesh is not watertight, {sampling_type} sampling may be unreliable")

    # Get tomogram dimensions
    vs = run.get_voxel_spacing(voxel_spacing)
    tomo = vs.get_tomogram(tomo_type)

    if tomo is None:
        logger.warning(f"Could not find tomogram of type '{tomo_type}' for run {run.name}")
        return None

    import zarr

    # Zarr arrays are stored (Z, Y, X); reverse to (X, Y, Z) and scale to physical units
    pixel_max_dim = zarr.open(tomo.zarr())["0"].shape[::-1]
    max_dim = np.array([d * voxel_spacing for d in pixel_max_dim])

    # Set default min_dist if not provided
    if min_dist is None:
        min_dist = voxel_spacing * 2

    edge_dist_physical = edge_dist * voxel_spacing

    if seed is not None:
        np.random.seed(seed)

    points = None
    orientations = None

    if sampling_type == "vertices":
        # Return mesh vertices directly (n_points is ignored for this mode)
        points = mesh.vertices.copy()

    elif sampling_type == "surface":
        # Sample points evenly on the mesh surface
        points, face_indices = tm.sample.sample_surface_even(mesh, n_points, radius=min_dist, seed=seed)

        if include_normals:
            # Build transforms whose z-axis is aligned with the face normal
            # at each sampled point
            face_normals = mesh.face_normals[face_indices]
            orientations = np.zeros((len(points), 4, 4))
            for i, normal in enumerate(face_normals):
                z_axis = np.array([0, 0, 1])
                if np.allclose(normal, z_axis):
                    rot_matrix = np.eye(3)
                elif np.allclose(normal, -z_axis):
                    # Antiparallel case: the cross-product formula degenerates
                    rot_matrix = np.array([[-1, 0, 0], [0, -1, 0], [0, 0, -1]])
                else:
                    # Rodrigues' rotation formula about the axis z x normal
                    v = np.cross(z_axis, normal)
                    s = np.linalg.norm(v)
                    c = np.dot(z_axis, normal)
                    vx = np.array([[0, -v[2], v[1]], [v[2], 0, -v[0]], [-v[1], v[0], 0]])
                    rot_matrix = np.eye(3) + vx + np.dot(vx, vx) * ((1 - c) / (s**2))

                orientations[i, :3, :3] = rot_matrix
                orientations[i, 3, 3] = 1.0

    elif sampling_type in ["inside", "outside"]:
        # Use Poisson disk sampling and keep the requested side of the mesh
        if sampling_type == "inside":
            points, _ = poisson_disk_in_out(
                n_points,
                0,
                mesh,
                max_dim,
                min_dist,
                edge_dist_physical,
                np.array([]),
                seed,
            )
        else:  # outside
            _, points = poisson_disk_in_out(
                0,
                n_points,
                mesh,
                max_dim,
                min_dist,
                edge_dist_physical,
                np.array([]),
                seed,
            )

    else:
        raise ValueError(
            f"Invalid sampling_type: {sampling_type}. Must be 'inside', 'surface', 'outside', or 'vertices'",
        )

    if points is None or len(points) == 0:
        logger.warning(f"No points generated for {sampling_type} sampling")
        return None

    # Filter points that are too close to volume edges
    valid_mask = np.all(
        [
            points[:, 0] >= edge_dist_physical,
            points[:, 0] <= max_dim[0] - edge_dist_physical,
            points[:, 1] >= edge_dist_physical,
            points[:, 1] <= max_dim[1] - edge_dist_physical,
            points[:, 2] >= edge_dist_physical,
            points[:, 2] <= max_dim[2] - edge_dist_physical,
        ],
        axis=0,
    )

    points = points[valid_mask]
    if orientations is not None:
        # Keep orientations in lockstep with the surviving points
        orientations = orientations[valid_mask]

    if len(points) == 0:
        logger.warning("No valid points after edge filtering")
        return None

    # Generate random orientations if requested and not already set
    if random_orientations and orientations is None:
        orientations = generate_random_orientations(len(points), seed)

    # Create and persist the picks
    pick_set = run.new_picks(object_name, session_id, user_id, exist_ok=True)
    pick_set.from_numpy(positions=points, transforms=orientations)
    pick_set.store()

    logger.info(f"Created {len(points)} picks using {sampling_type} sampling")
    return pick_set
315
+
316
+
317
def _picks_from_mesh_worker(
    run: "CopickRun",
    mesh_object_name: str,
    mesh_user_id: str,
    mesh_session_id: str,
    sampling_type: str,
    n_points: int,
    pick_object_name: str,
    pick_session_id: str,
    pick_user_id: str,
    voxel_spacing: float,
    tomo_type: str,
    min_dist: Optional[float],
    edge_dist: float,
    include_normals: bool,
    random_orientations: bool,
    seed: Optional[int],
) -> Dict[str, Any]:
    """Worker function for batch conversion of meshes to picks."""
    try:
        # Locate the source mesh for this run
        matching = run.get_meshes(
            object_name=mesh_object_name,
            user_id=mesh_user_id,
            session_id=mesh_session_id,
        )
        if not matching:
            return {"processed": 0, "errors": [f"No meshes found for {run.name}"]}

        surface = ensure_mesh(matching[0].mesh)
        if surface is None:
            return {"processed": 0, "errors": [f"Could not load mesh data for {run.name}"]}

        pick_set = picks_from_mesh(
            mesh=surface,
            sampling_type=sampling_type,
            n_points=n_points,
            run=run,
            object_name=pick_object_name,
            session_id=pick_session_id,
            user_id=pick_user_id,
            voxel_spacing=voxel_spacing,
            tomo_type=tomo_type,
            min_dist=min_dist,
            edge_dist=edge_dist,
            include_normals=include_normals,
            random_orientations=random_orientations,
            seed=seed,
        )

        if not (pick_set and pick_set.points):
            return {"processed": 0, "errors": [f"No picks generated for {run.name}"]}

        return {
            "processed": 1,
            "errors": [],
            "result": pick_set,
            "points_created": len(pick_set.points),
        }

    except Exception as e:
        # Report per-run failures instead of aborting the whole batch
        return {"processed": 0, "errors": [f"Error processing {run.name}: {e}"]}
374
+
375
+
376
def picks_from_mesh_standard(
    mesh: "CopickMesh",
    run: "CopickRun",
    output_object_name: str,
    output_session_id: str,
    output_user_id: str,
    sampling_type: str,
    n_points: int,
    voxel_spacing: float,
    tomo_type: str = "wbp",
    min_dist: Optional[float] = None,
    edge_dist: float = 32.0,
    include_normals: bool = False,
    random_orientations: bool = False,
    seed: Optional[int] = None,
    **kwargs,
) -> Optional[Tuple["CopickPicks", Dict[str, int]]]:
    """
    Standard signature wrapper for picks_from_mesh to match converter pattern.

    Args:
        mesh: CopickMesh object to sample from
        run: Copick run object
        output_object_name: Name for the output picks
        output_session_id: Session ID for the output picks
        output_user_id: User ID for the output picks
        sampling_type: Type of sampling ('inside', 'surface', 'outside', 'vertices')
        n_points: Number of points to sample (ignored for 'vertices')
        voxel_spacing: Voxel spacing for coordinate scaling
        tomo_type: Tomogram type for getting volume dimensions
        min_dist: Minimum distance between points
        edge_dist: Distance from volume edges in voxels
        include_normals: Include surface normals as orientations
        random_orientations: Generate random orientations for points
        seed: Random seed for reproducible results
        **kwargs: Additional arguments (ignored)

    Returns:
        Tuple of (CopickPicks object, stats dict) or None if conversion failed
    """
    try:
        # Get the trimesh object
        trimesh_obj = mesh.mesh
        if trimesh_obj is None:
            logger.error("Could not load mesh data")
            return None

        # CONSISTENCY FIX: flatten Scenes via the shared ensure_mesh helper
        # instead of duplicating the concatenation logic inline.
        if isinstance(trimesh_obj, tm.Scene) and len(trimesh_obj.geometry) == 0:
            logger.error("Mesh is empty")
            return None
        trimesh_obj = ensure_mesh(trimesh_obj)

        # Call the original picks_from_mesh function
        result_picks = picks_from_mesh(
            mesh=trimesh_obj,
            sampling_type=sampling_type,
            n_points=n_points,
            run=run,
            object_name=output_object_name,
            session_id=output_session_id,
            user_id=output_user_id,
            voxel_spacing=voxel_spacing,
            tomo_type=tomo_type,
            min_dist=min_dist,
            edge_dist=edge_dist,
            include_normals=include_normals,
            random_orientations=random_orientations,
            seed=seed,
        )

        if result_picks is None:
            return None

        # Get point count for stats
        points, _ = result_picks.numpy()
        stats = {"points_created": len(points) if points is not None else 0}

        return result_picks, stats

    except Exception as e:
        logger.error(f"Error converting mesh to picks: {e}")
        return None
460
+
461
+
462
def picks_from_mesh_batch(
    root: "CopickRoot",
    mesh_object_name: str,
    mesh_user_id: str,
    mesh_session_id: str,
    sampling_type: str,
    n_points: int,
    pick_object_name: str,
    pick_session_id: str,
    pick_user_id: str,
    voxel_spacing: float,
    tomo_type: str = "wbp",
    min_dist: Optional[float] = None,
    edge_dist: float = 32.0,
    include_normals: bool = False,
    random_orientations: bool = False,
    seed: Optional[int] = None,
    run_names: Optional[List[str]] = None,
    workers: int = 8,
) -> Dict[str, Any]:
    """
    Batch convert meshes to picks across multiple runs.

    Args:
        root: The copick root containing runs to process.
        mesh_object_name: Name of the mesh object to sample from.
        mesh_user_id: User ID of the mesh to convert.
        mesh_session_id: Session ID of the mesh to convert.
        sampling_type: Type of sampling ('inside', 'surface', 'outside', 'vertices').
        n_points: Number of points to sample (ignored for 'vertices' type).
        pick_object_name: Name of the object for created picks.
        pick_session_id: Session ID for created picks.
        pick_user_id: User ID for created picks.
        voxel_spacing: Voxel spacing for coordinate scaling.
        tomo_type: Tomogram type for getting volume dimensions. Default is 'wbp'.
        min_dist: Minimum distance between points. If None, uses 2 * voxel_spacing.
        edge_dist: Distance from volume edges in voxels. Default is 32.0.
        include_normals: Include surface normals as orientations (surface sampling only). Default is False.
        random_orientations: Generate random orientations for points. Default is False.
        seed: Random seed for reproducible results.
        run_names: List of run names to process. If None, processes all runs.
        workers: Number of worker processes. Default is 8.

    Returns:
        Dictionary with processing results and statistics.
    """
    from copick.ops.run import map_runs

    # Default to every run in the project when no explicit subset is given
    if run_names is None:
        selected_runs = [run.name for run in root.runs]
    else:
        selected_runs = run_names

    # Everything the per-run worker needs, forwarded verbatim
    worker_kwargs = {
        "mesh_object_name": mesh_object_name,
        "mesh_user_id": mesh_user_id,
        "mesh_session_id": mesh_session_id,
        "sampling_type": sampling_type,
        "n_points": n_points,
        "pick_object_name": pick_object_name,
        "pick_session_id": pick_session_id,
        "pick_user_id": pick_user_id,
        "voxel_spacing": voxel_spacing,
        "tomo_type": tomo_type,
        "min_dist": min_dist,
        "edge_dist": edge_dist,
        "include_normals": include_normals,
        "random_orientations": random_orientations,
        "seed": seed,
    }

    return map_runs(
        callback=_picks_from_mesh_worker,
        root=root,
        runs=selected_runs,
        workers=workers,
        task_desc="Converting meshes to picks",
        **worker_kwargs,
    )
536
+
537
+
538
# Lazy batch converter for the new architecture: defers per-run conversion
# work to the shared lazy-converter infrastructure, wrapping the
# standard-signature converter defined above.
picks_from_mesh_lazy_batch = create_lazy_batch_converter(
    converter_func=picks_from_mesh_standard,
    task_description="Converting meshes to picks",
)
@@ -0,0 +1,168 @@
1
+ from typing import TYPE_CHECKING, Dict, Optional, Tuple
2
+
3
+ import numpy as np
4
+ import scipy.ndimage as ndi
5
+ from copick.util.log import get_logger
6
+ from skimage.measure import regionprops
7
+ from skimage.morphology import ball, binary_dilation, binary_erosion
8
+ from skimage.segmentation import watershed
9
+
10
+ from copick_utils.converters.converter_common import (
11
+ create_batch_converter,
12
+ create_batch_worker,
13
+ )
14
+ from copick_utils.converters.lazy_converter import create_lazy_batch_converter
15
+
16
+ if TYPE_CHECKING:
17
+ from copick.models import CopickPicks, CopickRun, CopickSegmentation
18
+
19
+ logger = get_logger(__name__)
20
+
21
+
22
def _extract_centroids_from_segmentation_array(
    segmentation: np.ndarray,
    segmentation_idx: int,
    maxima_filter_size: int,
    min_particle_size: int,
    max_particle_size: int,
    voxel_spacing: float,
) -> Optional[np.ndarray]:
    """
    Extract centroids from a segmentation array.

    Args:
        segmentation: Multilabel segmentation array.
        segmentation_idx: The specific label from the segmentation to process.
        maxima_filter_size: Size of the maximum detection filter.
        min_particle_size: Minimum size threshold for particles.
        max_particle_size: Maximum size threshold for particles.
        voxel_spacing: The voxel spacing used to scale pick locations.

    Returns:
        Array of centroid positions or None if no centroids found.
    """
    # Binary mask of the requested label
    label_mask = (segmentation == segmentation_idx).astype(int)

    # Bail out early when the label is absent from the volume
    if np.sum(label_mask) == 0:
        logger.warning(f"No segmentation with label {segmentation_idx} found")
        return None

    # Morphological open (erode then dilate) to suppress spurious voxels
    selem = ball(1)
    cleaned = binary_dilation(binary_erosion(label_mask, selem), selem)

    # Seed markers at local maxima of the distance transform
    distance = ndi.distance_transform_edt(cleaned)
    footprint = np.ones((maxima_filter_size, maxima_filter_size, maxima_filter_size))
    peaks = distance == ndi.maximum_filter(distance, footprint=footprint)

    # Watershed splits touching particles starting from the markers
    markers, _ = ndi.label(peaks)
    labels = watershed(-distance, markers, mask=cleaned)

    # Keep only centroids of regions within the requested size window
    centroids = [
        region.centroid
        for region in regionprops(labels)
        if min_particle_size <= region.area <= max_particle_size
    ]

    if not centroids:
        logger.warning(f"No valid centroids found for label {segmentation_idx}")
        return None

    # regionprops yields (Z, Y, X); reorder to (X, Y, Z) and scale to physical units
    return np.array(centroids)[:, [2, 1, 0]] * voxel_spacing
81
+
82
+
83
def picks_from_segmentation(
    segmentation: "CopickSegmentation",
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    segmentation_idx: int,
    maxima_filter_size: int = 9,
    min_particle_size: int = 1000,
    max_particle_size: int = 50000,
) -> Optional[Tuple["CopickPicks", Dict[str, int]]]:
    """
    Convert a CopickSegmentation to picks by extracting centroids.

    Args:
        segmentation: CopickSegmentation object to convert
        run: CopickRun object
        object_name: Name for the output pick object
        session_id: Session ID for the output picks
        user_id: User ID for the output picks
        segmentation_idx: The specific label from the segmentation to process
        maxima_filter_size: Size of the maximum detection filter
        min_particle_size: Minimum size threshold for particles
        max_particle_size: Maximum size threshold for particles

    Returns:
        Tuple of (CopickPicks object, stats dict) or None if creation failed.
        Stats dict contains 'points_created'.
    """
    try:
        # Load the segmentation volume
        volume = segmentation.numpy()
        if volume is None or volume.size == 0:
            logger.error("Empty or invalid segmentation volume")
            return None

        # Centroid positions are scaled by the segmentation's own voxel size
        positions = _extract_centroids_from_segmentation_array(
            volume,
            segmentation_idx,
            maxima_filter_size,
            min_particle_size,
            max_particle_size,
            segmentation.voxel_size,
        )
        if positions is None:
            logger.error("No centroids extracted from segmentation")
            return None

        # Persist the centroids as a pick set
        pick_set = run.new_picks(object_name, session_id, user_id, exist_ok=True)
        pick_set.from_numpy(positions=positions)
        pick_set.store()

        n_created = len(positions)
        logger.info(f"Created {n_created} picks from segmentation")
        return pick_set, {"points_created": n_created}

    except Exception as e:
        logger.error(f"Error creating picks: {e}")
        return None
149
+
150
+
151
# Per-run worker built on the shared batch infrastructure; min_points=0 so
# runs producing no picks are reported rather than rejected.
_picks_from_segmentation_worker = create_batch_worker(picks_from_segmentation, "picks", "segmentation", min_points=0)


# Eager batch converter: converts segmentations to picks across all selected
# runs using the common converter infrastructure.
picks_from_segmentation_batch = create_batch_converter(
    picks_from_segmentation,
    "Converting segmentations to picks",
    "picks",
    "segmentation",
    min_points=0,
)

# Lazy batch converter for the new architecture: defers per-run conversion
# work until execution time.
picks_from_segmentation_lazy_batch = create_lazy_batch_converter(
    converter_func=picks_from_segmentation,
    task_description="Converting segmentations to picks",
)