copick-utils 0.6.1__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67) hide show
  1. copick_utils/__init__.py +1 -1
  2. copick_utils/cli/__init__.py +33 -0
  3. copick_utils/cli/clipmesh.py +161 -0
  4. copick_utils/cli/clippicks.py +154 -0
  5. copick_utils/cli/clipseg.py +163 -0
  6. copick_utils/cli/conversion_commands.py +32 -0
  7. copick_utils/cli/enclosed.py +191 -0
  8. copick_utils/cli/filter_components.py +166 -0
  9. copick_utils/cli/fit_spline.py +191 -0
  10. copick_utils/cli/hull.py +138 -0
  11. copick_utils/cli/input_output_selection.py +76 -0
  12. copick_utils/cli/logical_commands.py +29 -0
  13. copick_utils/cli/mesh2picks.py +170 -0
  14. copick_utils/cli/mesh2seg.py +167 -0
  15. copick_utils/cli/meshop.py +262 -0
  16. copick_utils/cli/picks2ellipsoid.py +171 -0
  17. copick_utils/cli/picks2mesh.py +181 -0
  18. copick_utils/cli/picks2plane.py +156 -0
  19. copick_utils/cli/picks2seg.py +134 -0
  20. copick_utils/cli/picks2sphere.py +170 -0
  21. copick_utils/cli/picks2surface.py +164 -0
  22. copick_utils/cli/picksin.py +146 -0
  23. copick_utils/cli/picksout.py +148 -0
  24. copick_utils/cli/processing_commands.py +18 -0
  25. copick_utils/cli/seg2mesh.py +135 -0
  26. copick_utils/cli/seg2picks.py +128 -0
  27. copick_utils/cli/segop.py +248 -0
  28. copick_utils/cli/separate_components.py +155 -0
  29. copick_utils/cli/skeletonize.py +164 -0
  30. copick_utils/cli/util.py +580 -0
  31. copick_utils/cli/validbox.py +155 -0
  32. copick_utils/converters/__init__.py +35 -0
  33. copick_utils/converters/converter_common.py +543 -0
  34. copick_utils/converters/ellipsoid_from_picks.py +335 -0
  35. copick_utils/converters/lazy_converter.py +576 -0
  36. copick_utils/converters/mesh_from_picks.py +209 -0
  37. copick_utils/converters/mesh_from_segmentation.py +119 -0
  38. copick_utils/converters/picks_from_mesh.py +542 -0
  39. copick_utils/converters/picks_from_segmentation.py +168 -0
  40. copick_utils/converters/plane_from_picks.py +251 -0
  41. copick_utils/converters/segmentation_from_mesh.py +291 -0
  42. copick_utils/{segmentation → converters}/segmentation_from_picks.py +123 -13
  43. copick_utils/converters/sphere_from_picks.py +306 -0
  44. copick_utils/converters/surface_from_picks.py +337 -0
  45. copick_utils/logical/__init__.py +43 -0
  46. copick_utils/logical/distance_operations.py +604 -0
  47. copick_utils/logical/enclosed_operations.py +222 -0
  48. copick_utils/logical/mesh_operations.py +443 -0
  49. copick_utils/logical/point_operations.py +303 -0
  50. copick_utils/logical/segmentation_operations.py +399 -0
  51. copick_utils/process/__init__.py +47 -0
  52. copick_utils/process/connected_components.py +360 -0
  53. copick_utils/process/filter_components.py +306 -0
  54. copick_utils/process/hull.py +106 -0
  55. copick_utils/process/skeletonize.py +326 -0
  56. copick_utils/process/spline_fitting.py +648 -0
  57. copick_utils/process/validbox.py +333 -0
  58. copick_utils/util/__init__.py +6 -0
  59. copick_utils/util/config_models.py +614 -0
  60. {copick_utils-0.6.1.dist-info → copick_utils-1.0.0.dist-info}/METADATA +15 -2
  61. copick_utils-1.0.0.dist-info/RECORD +71 -0
  62. copick_utils-1.0.0.dist-info/entry_points.txt +29 -0
  63. copick_utils/segmentation/picks_from_segmentation.py +0 -81
  64. copick_utils-0.6.1.dist-info/RECORD +0 -14
  65. /copick_utils/{segmentation → io}/__init__.py +0 -0
  66. {copick_utils-0.6.1.dist-info → copick_utils-1.0.0.dist-info}/WHEEL +0 -0
  67. {copick_utils-0.6.1.dist-info → copick_utils-1.0.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,251 @@
1
+ from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple
2
+
3
+ import numpy as np
4
+ import trimesh as tm
5
+ from copick.util.log import get_logger
6
+ from sklearn.decomposition import PCA
7
+
8
+ from copick_utils.converters.converter_common import (
9
+ cluster,
10
+ create_batch_converter,
11
+ create_batch_worker,
12
+ store_mesh_with_stats,
13
+ validate_points,
14
+ )
15
+ from copick_utils.converters.lazy_converter import create_lazy_batch_converter
16
+
17
+ if TYPE_CHECKING:
18
+ from copick.models import CopickMesh, CopickRun
19
+
20
+ logger = get_logger(__name__)
21
+
22
+
23
def fit_plane_to_points(points: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
    """Fit a plane to a set of 3D points via a least-squares (PCA-style) fit.

    The plane normal is the direction of least variance of the centered
    points, i.e. the last right singular vector of the centered point matrix.
    This is mathematically identical to
    ``PCA(n_components=3).fit(...).components_[-1]`` but uses
    ``np.linalg.svd`` directly, avoiding construction of an sklearn
    estimator for a one-shot computation.

    Args:
        points: Nx3 array of points.

    Returns:
        Tuple of (center, normal_vector). The normal is a unit vector,
        flipped so its z-component is non-negative for a consistent
        orientation.

    Raises:
        ValueError: If fewer than 3 points are provided.
    """
    if len(points) < 3:
        raise ValueError("Need at least 3 points to fit a plane")

    # Center the points
    center = np.mean(points, axis=0)
    centered_points = points - center

    # SVD of the centered points: the rows of vt are the principal axes,
    # ordered by decreasing variance. The last row spans the direction of
    # smallest variance, which is the plane normal.
    _, _, vt = np.linalg.svd(centered_points, full_matrices=False)
    normal = vt[-1]

    # Ensure consistent normal direction (pointing upward in z if possible)
    if normal[2] < 0:
        normal = -normal

    return center, normal
51
+
52
+
53
def create_plane_mesh(center: np.ndarray, normal: np.ndarray, points: np.ndarray, padding: float = 1.0) -> tm.Trimesh:
    """Build a rectangular plane mesh covering the footprint of the points.

    Args:
        center: Plane center point.
        normal: Plane normal vector.
        points: Original points used to determine the plane's extent.
        padding: Extra padding factor for plane size (1.0 = exact fit).

    Returns:
        Trimesh plane object (a rectangle made of two triangles).
    """
    # Pick a helper axis guaranteed not to be parallel to the normal.
    helper = np.array([0, 0, 1]) if abs(normal[2]) < 0.9 else np.array([1, 0, 0])

    # Construct an orthonormal in-plane basis (u_axis, v_axis).
    u_axis = np.cross(normal, helper)
    u_axis = u_axis / np.linalg.norm(u_axis)
    v_axis = np.cross(normal, u_axis)
    v_axis = v_axis / np.linalg.norm(v_axis)

    # Project the points into plane coordinates to measure the footprint.
    offsets = points - center
    proj_u = np.dot(offsets, u_axis)
    proj_v = np.dot(offsets, v_axis)

    u_min, u_max = np.min(proj_u), np.max(proj_u)
    v_min, v_max = np.min(proj_v), np.max(proj_v)

    # Padded extents, centered on the midpoint of the footprint.
    u_range = (u_max - u_min) * padding
    v_range = (v_max - v_min) * padding
    mid_u = (u_min + u_max) / 2
    mid_v = (v_min + v_max) / 2

    # Four corners of the rectangle, counter-clockwise.
    corners = [
        center + (mid_u + su * (u_range / 2)) * u_axis + (mid_v + sv * (v_range / 2)) * v_axis
        for su, sv in ((-1, -1), (1, -1), (1, 1), (-1, 1))
    ]

    # Two triangles spanning the rectangle.
    triangles = [[0, 1, 2], [0, 2, 3]]

    return tm.Trimesh(vertices=corners, faces=triangles)
102
+
103
+
104
def plane_from_picks(
    points: np.ndarray,
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    use_clustering: bool = False,
    clustering_method: str = "dbscan",
    clustering_params: Optional[Dict[str, Any]] = None,
    padding: float = 1.2,
    all_clusters: bool = True,
    individual_meshes: bool = False,
    session_id_template: Optional[str] = None,
) -> Optional[Tuple["CopickMesh", Dict[str, int]]]:
    """Create plane mesh(es) from pick points.

    Args:
        points: Nx3 array of pick positions.
        run: Copick run object.
        object_name: Name of the mesh object.
        session_id: Session ID for the mesh.
        user_id: User ID for the mesh.
        use_clustering: Whether to cluster points first.
        clustering_method: Clustering method ('dbscan', 'kmeans').
        clustering_params: Parameters for clustering.
            e.g.
            - {'eps': 5.0, 'min_samples': 3} for DBSCAN
            - {'n_clusters': 3} for KMeans
        padding: Padding factor for plane size (1.0 = exact fit, >1.0 = larger plane).
        all_clusters: If True, use all clusters; if False, use only the largest cluster.
        individual_meshes: If True, create separate mesh objects for each plane.
        session_id_template: Template for individual mesh session IDs; may reference
            {base_session_id} and {instance_id}.

    Returns:
        Tuple of (CopickMesh object, stats dict) or None if creation failed.
        Stats dict contains 'vertices_created' and 'faces_created' totals.
        When individual_meshes is True, the first created mesh is returned
        together with totals accumulated over all created meshes.
    """
    if not validate_points(points, 3, "plane"):
        return None

    if clustering_params is None:
        clustering_params = {}

    def create_plane_from_points(cluster_points: np.ndarray) -> tm.Trimesh:
        # Fit orientation, then build a padded rectangle covering the points.
        center, normal = fit_plane_to_points(cluster_points)
        return create_plane_mesh(center, normal, cluster_points, padding)

    # Handle clustering workflow with special plane logic
    if use_clustering:
        point_clusters = cluster(
            points,
            clustering_method,
            min_points_per_cluster=3,  # Planes need at least 3 points
            **clustering_params,
        )

        if not point_clusters:
            logger.warning("No valid clusters found")
            return None

        logger.info(f"Found {len(point_clusters)} clusters")

        if all_clusters and len(point_clusters) > 1:
            if individual_meshes:
                # One stored mesh object per cluster; stats accumulate over all.
                created_meshes = []
                total_vertices = 0
                total_faces = 0

                for i, cluster_points in enumerate(point_clusters):
                    try:
                        plane_mesh = create_plane_from_points(cluster_points)

                        # Generate session ID using template if provided
                        if session_id_template:
                            plane_session_id = session_id_template.format(
                                base_session_id=session_id,
                                instance_id=i,
                            )
                        else:
                            plane_session_id = f"{session_id}-{i:03d}"

                        copick_mesh = run.new_mesh(object_name, plane_session_id, user_id, exist_ok=True)
                        copick_mesh.mesh = plane_mesh
                        copick_mesh.store()
                        created_meshes.append(copick_mesh)
                        total_vertices += len(plane_mesh.vertices)
                        total_faces += len(plane_mesh.faces)
                        logger.info(
                            f"Created individual plane mesh {i} with {len(plane_mesh.vertices)} vertices",
                        )
                    except Exception as e:
                        # Best-effort: one bad cluster should not lose the rest.
                        logger.error(f"Failed to create mesh {i}: {e}")
                        continue

                if not created_meshes:
                    return None
                # Return the first mesh and total stats across all planes.
                stats = {"vertices_created": total_vertices, "faces_created": total_faces}
                return created_meshes[0], stats
            else:
                # Build one plane per cluster and concatenate into a single mesh.
                combined_mesh = tm.util.concatenate(
                    [create_plane_from_points(cluster_points) for cluster_points in point_clusters],
                )
        else:
            # Use largest cluster only. NOTE: loop variable renamed so it no
            # longer shadows the imported `cluster` helper.
            cluster_sizes = [len(c) for c in point_clusters]
            points_to_use = point_clusters[int(np.argmax(cluster_sizes))]
            logger.info(f"Using largest cluster with {len(points_to_use)} points")

            combined_mesh = create_plane_from_points(points_to_use)
    else:
        # Use all points without clustering
        combined_mesh = create_plane_from_points(points)

    # Store mesh and return stats
    try:
        return store_mesh_with_stats(run, combined_mesh, object_name, session_id, user_id, "plane")
    except Exception as e:
        logger.critical(f"Error creating mesh: {e}")
        return None
233
+
234
+
235
# Worker used by the batch converter below; rejects pick sets with fewer
# than 3 points, the minimum needed to fit a plane.
_plane_from_picks_worker = create_batch_worker(plane_from_picks, "plane", min_points=3)


# Batch converter: applies plane_from_picks across many runs/pick sets with
# a shared progress description.
plane_from_picks_batch = create_batch_converter(
    plane_from_picks,
    "Converting picks to plane meshes",
    "plane",
    min_points=3,
)

# Lazy batch converter for the new (deferred-execution) architecture.
plane_from_picks_lazy_batch = create_lazy_batch_converter(
    converter_func=plane_from_picks,
    task_description="Converting picks to plane meshes",
)
@@ -0,0 +1,291 @@
1
+ """Convert meshes to segmentation volumes."""
2
+
3
+ from concurrent.futures import ThreadPoolExecutor, as_completed
4
+ from typing import TYPE_CHECKING, Dict, Optional, Tuple
5
+
6
+ import numpy as np
7
+ import trimesh as tm
8
+ from copick.util.log import get_logger
9
+ from trimesh.ray.ray_triangle import RayMeshIntersector
10
+
11
+ from copick_utils.converters.converter_common import (
12
+ create_batch_converter,
13
+ create_batch_worker,
14
+ )
15
+ from copick_utils.converters.lazy_converter import create_lazy_batch_converter
16
+
17
+ if TYPE_CHECKING:
18
+ from copick.models import CopickMesh, CopickRun, CopickSegmentation
19
+
20
+ logger = get_logger(__name__)
21
+
22
+
23
def ensure_mesh(trimesh_object) -> Optional[tm.Trimesh]:
    """
    Collapse a trimesh object into a single mesh.

    Args:
        trimesh_object: A Trimesh or Scene object

    Returns:
        Single Trimesh object, or None if the Scene contains no geometry

    Raises:
        ValueError: If input is not a Trimesh or Scene object
    """
    if isinstance(trimesh_object, tm.Trimesh):
        # Already a single mesh — nothing to do.
        return trimesh_object

    if isinstance(trimesh_object, tm.Scene):
        geometries = list(trimesh_object.geometry.values())
        if not geometries:
            return None
        # Merge every geometry in the scene into one mesh.
        return tm.util.concatenate(geometries)

    raise ValueError("Input must be a Trimesh or Scene object")
45
+
46
+
47
def _onesmask_z(mesh: tm.Trimesh, voxel_dims: Tuple[int, int, int], voxel_spacing: float) -> np.ndarray:
    """Create mask by ray casting in Z direction.

    Casts one ray per (x, y) voxel column, starting one voxel below the
    volume and pointing in +z. A running 2D mask is toggled at every mesh
    crossing (even-odd rule), so voxels between an odd and the next even
    crossing are marked as inside.

    Args:
        mesh: Mesh to rasterize (assumed watertight for a correct fill).
        voxel_dims: Output dimensions as (x, y, z).
        voxel_spacing: Physical size of one voxel.

    Returns:
        Boolean volume of shape (z, y, x).

    NOTE(review): assumes all ray/mesh intersections map to x/y voxel
    indices inside [0, dim); indices past the grid would raise IndexError
    and negative ones would wrap — confirm meshes are clipped to the
    tomogram bounds upstream.
    """
    intersector = RayMeshIntersector(mesh)

    # Create a grid of rays in XY plane, shooting in Z direction
    # (origins at z = -voxel_spacing so the first crossing toggles "inside").
    grid_x, grid_y = np.mgrid[0 : voxel_dims[0], 0 : voxel_dims[1]]
    ray_grid = np.vstack([grid_x.ravel(), grid_y.ravel(), -np.ones((grid_x.size,))]).T * voxel_spacing
    ray_dir = np.zeros((ray_grid.shape[0], 3))
    ray_dir[:, 2] = 1

    loc, _, _ = intersector.intersects_location(ray_grid, ray_dir)

    # Convert to voxel coordinates and sort by z
    int_loc = np.round(loc / voxel_spacing).astype("int")
    sort_idx = int_loc[:, 2].argsort()
    int_loc = int_loc[sort_idx, :]

    # Build volume by tracking crossings
    img = np.zeros((voxel_dims[1], voxel_dims[0]), dtype="bool")
    vol = np.zeros((voxel_dims[2], voxel_dims[1], voxel_dims[0]), dtype="bool")

    for z in range(voxel_dims[2]):
        # Toggle the 2D mask wherever a crossing occurs at this z slice,
        # then snapshot it as the slice's inside/outside state.
        idx = int_loc[:, 2] == z
        img[int_loc[idx, 1], int_loc[idx, 0]] = np.logical_not(img[int_loc[idx, 1], int_loc[idx, 0]])
        vol[z, :, :] = img

    return vol
74
+
75
+
76
def _onesmask_x(mesh: tm.Trimesh, voxel_dims: Tuple[int, int, int], voxel_spacing: float) -> np.ndarray:
    """Create mask by ray casting in X direction.

    Mirror of _onesmask_z: one ray per (y, z) voxel column, starting one
    voxel before the volume and pointing in +x, with a running 2D mask
    toggled at every mesh crossing (even-odd rule).

    Args:
        mesh: Mesh to rasterize (assumed watertight for a correct fill).
        voxel_dims: Output dimensions as (x, y, z).
        voxel_spacing: Physical size of one voxel.

    Returns:
        Boolean volume of shape (z, y, x).

    NOTE(review): same bounds assumption as _onesmask_z — intersections
    outside the voxel grid in y/z would raise or wrap.
    """
    intersector = RayMeshIntersector(mesh)

    # Create a grid of rays in YZ plane, shooting in X direction
    # (origins at x = -voxel_spacing so the first crossing toggles "inside").
    grid_y, grid_z = np.mgrid[0 : voxel_dims[1], 0 : voxel_dims[2]]
    ray_grid = np.vstack([-np.ones((grid_y.size,)), grid_y.ravel(), grid_z.ravel()]).T * voxel_spacing
    ray_dir = np.zeros((ray_grid.shape[0], 3))
    ray_dir[:, 0] = 1

    loc, _, _ = intersector.intersects_location(ray_grid, ray_dir)

    # Convert to voxel coordinates and sort by x
    int_loc = np.round(loc / voxel_spacing).astype("int")
    sort_idx = int_loc[:, 0].argsort()
    int_loc = int_loc[sort_idx, :]

    # Build volume by tracking crossings
    img = np.zeros((voxel_dims[2], voxel_dims[1]), dtype="bool")
    vol = np.zeros((voxel_dims[2], voxel_dims[1], voxel_dims[0]), dtype="bool")

    for x in range(voxel_dims[0]):
        # Toggle the 2D mask wherever a crossing occurs at this x slab,
        # then snapshot it as the slab's inside/outside state.
        idx = int_loc[:, 0] == x
        img[int_loc[idx, 2], int_loc[idx, 1]] = np.logical_not(img[int_loc[idx, 2], int_loc[idx, 1]])
        vol[:, :, x] = img

    return vol
103
+
104
+
105
def mesh_to_volume(mesh: tm.Trimesh, voxel_dims: Tuple[int, int, int], voxel_spacing: float) -> np.ndarray:
    """
    Convert a watertight mesh to a binary volume using ray casting.

    Rasterizes the mesh along two independent axes (x and z) in parallel
    and intersects the results, which suppresses single-axis ray-casting
    artifacts.

    Args:
        mesh: Trimesh object representing the mesh
        voxel_dims: Dimensions of the output volume (x, y, z)
        voxel_spacing: Spacing between voxels in physical units

    Returns:
        Binary volume as numpy array with shape (z, y, x)
    """
    # Each worker gets its own copy of the mesh since the intersector is
    # built per-thread.
    with ThreadPoolExecutor(max_workers=2) as pool:
        x_future = pool.submit(_onesmask_x, mesh.copy(), voxel_dims, voxel_spacing)
        z_future = pool.submit(_onesmask_z, mesh.copy(), voxel_dims, voxel_spacing)
        x_mask = x_future.result()
        z_mask = z_future.result()

    # Keep only voxels that both axis rasterizations agree are inside.
    return np.logical_and(x_mask, z_mask)
128
+
129
+
130
def mesh_to_boundary_volume(
    mesh: tm.Trimesh,
    voxel_dims: Tuple[int, int, int],
    voxel_spacing: float,
    sampling_density: float = 1.0,
) -> np.ndarray:
    """
    Convert a mesh to a binary volume by voxelizing only the surface/boundary.

    Args:
        mesh: Trimesh object representing the mesh
        voxel_dims: Dimensions of the output volume (x, y, z)
        voxel_spacing: Spacing between voxels in physical units
        sampling_density: Density of surface sampling (samples per voxel edge length)

    Returns:
        Binary volume as numpy array with shape (z, y, x)
    """
    # Sample count scales with surface area; density is given in samples per
    # voxel edge length, so square it to get samples per unit area.
    samples_per_area = (sampling_density / voxel_spacing) ** 2
    num_samples = max(int(mesh.area * samples_per_area), 1000)  # Minimum 1000 points

    # Uniform surface sampling, then snap each sample to its nearest voxel.
    sampled_points, _ = tm.sample.sample_surface(mesh, num_samples)
    coords = np.round(sampled_points / voxel_spacing).astype(int)

    volume = np.zeros((voxel_dims[2], voxel_dims[1], voxel_dims[0]), dtype=bool)

    # Drop samples that fall outside the voxel grid on any axis.
    inside = np.all((coords >= 0) & (coords < np.asarray(voxel_dims)), axis=1)
    coords = coords[inside]

    if len(coords) > 0:
        # Volume is indexed (z, y, x) while coords are (x, y, z).
        volume[coords[:, 2], coords[:, 1], coords[:, 0]] = True

    return volume
181
+
182
+
183
def segmentation_from_mesh(
    mesh: "CopickMesh",
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    voxel_spacing: float,
    tomo_type: str = "wbp",
    is_multilabel: bool = False,
    mode: str = "watertight",
    boundary_sampling_density: float = 1.0,
    invert: bool = False,
) -> Optional[Tuple["CopickSegmentation", Dict[str, int]]]:
    """
    Convert a CopickMesh to a segmentation volume.

    Args:
        mesh: CopickMesh object to convert
        run: CopickRun object
        object_name: Name for the output segmentation
        session_id: Session ID for the output segmentation
        user_id: User ID for the output segmentation
        voxel_spacing: Voxel spacing for the segmentation
        tomo_type: Type of tomogram to use for reference dimensions
        is_multilabel: Whether the segmentation is multilabel
        mode: Voxelization mode ('watertight' or 'boundary')
        boundary_sampling_density: Surface sampling density for boundary mode (samples per voxel edge length)
        invert: Whether to invert the volume (fill outside instead of inside)

    Returns:
        Tuple of (CopickSegmentation object, stats dict) or None if creation failed.
        Stats dict contains 'voxels_created'.

    Raises:
        ValueError: If ``mode`` is not 'watertight' or 'boundary'.
    """
    # Validate mode up-front. Previously this raise lived inside the broad
    # try/except below, so the intended ValueError was swallowed and silently
    # converted into `return None`, hiding a programmer error.
    if mode not in ("watertight", "boundary"):
        raise ValueError(f"Unknown voxelization mode: {mode}. Must be 'watertight' or 'boundary'.")

    try:
        # Get the trimesh object from CopickMesh
        mesh_obj = ensure_mesh(mesh.mesh)
        if mesh_obj is None:
            logger.error("Empty mesh")
            return None

        # Get reference dimensions from tomogram
        vs = run.get_voxel_spacing(voxel_spacing)
        if not vs:
            logger.error(f"Voxel spacing {voxel_spacing} not found")
            return None

        tomos = vs.get_tomograms(tomo_type=tomo_type)
        if not tomos:
            logger.error(f"Tomogram type {tomo_type} not found")
            return None

        # Get dimensions from zarr (imported lazily to keep module import light)
        import zarr

        tomo_array = zarr.open(tomos[0].zarr())["0"]
        vox_dim = tomo_array.shape[::-1]  # zarr is (z,y,x), we want (x,y,z)

        # Convert mesh to volume based on mode (already validated above)
        if mode == "watertight":
            vol = mesh_to_volume(mesh_obj, vox_dim, voxel_spacing)
        else:  # mode == "boundary"
            vol = mesh_to_boundary_volume(mesh_obj, vox_dim, voxel_spacing, boundary_sampling_density)

        # Apply inversion if requested (fill outside instead of inside)
        if invert:
            vol = ~vol

        # Create or get segmentation
        seg = run.new_segmentation(
            name=object_name,
            user_id=user_id,
            session_id=session_id,
            is_multilabel=is_multilabel,
            voxel_size=voxel_spacing,
            exist_ok=True,
        )

        # Store the volume using modern copick API
        seg.from_numpy(vol.astype(np.uint8))

        stats = {"voxels_created": int(np.sum(vol))}
        logger.info(f"Created segmentation with {stats['voxels_created']} voxels")
        return seg, stats

    except Exception as e:
        logger.error(f"Error creating segmentation: {e}")
        return None
272
+
273
+
274
# Worker used by the batch converter below; meshes carry no point-count
# requirement, hence min_points=0.
_segmentation_from_mesh_worker = create_batch_worker(segmentation_from_mesh, "segmentation", "mesh", min_points=0)


# Batch converter: applies segmentation_from_mesh across many runs/meshes
# with a shared progress description.
segmentation_from_mesh_batch = create_batch_converter(
    segmentation_from_mesh,
    "Converting meshes to segmentations",
    "segmentation",
    "mesh",
    min_points=0,
)

# Lazy batch converter for the new (deferred-execution) architecture.
segmentation_from_mesh_lazy_batch = create_lazy_batch_converter(
    converter_func=segmentation_from_mesh,
    task_description="Converting meshes to segmentations",
)
@@ -1,9 +1,29 @@
1
+ from typing import TYPE_CHECKING, Dict, Optional, Tuple
2
+
1
3
  import numpy as np
2
4
  import zarr
5
+ from copick.util.log import get_logger
3
6
  from scipy.ndimage import zoom
4
7
 
8
+ from copick_utils.converters.converter_common import (
9
+ create_batch_converter,
10
+ create_batch_worker,
11
+ )
12
+ from copick_utils.converters.lazy_converter import create_lazy_batch_converter
13
+
14
+ if TYPE_CHECKING:
15
+ from copick.models import CopickObject, CopickPicks, CopickRun, CopickSegmentation
16
+
17
+ logger = get_logger(__name__)
5
18
 
6
- def from_picks(pick, seg_volume, radius: float = 10.0, label_value: int = 1, voxel_spacing: float = 10):
19
+
20
+ def from_picks(
21
+ pick: "CopickPicks",
22
+ seg_volume: np.ndarray,
23
+ radius: float = 10.0,
24
+ label_value: int = 1,
25
+ voxel_spacing: float = 10,
26
+ ) -> np.ndarray:
7
27
  """
8
28
  Paints picks into a segmentation volume as spheres.
9
29
 
@@ -74,7 +94,7 @@ def from_picks(pick, seg_volume, radius: float = 10.0, label_value: int = 1, vox
74
94
  return seg_volume
75
95
 
76
96
 
77
- def downsample_to_exact_shape(array, target_shape):
97
+ def downsample_to_exact_shape(array: np.ndarray, target_shape: tuple) -> np.ndarray:
78
98
  """
79
99
  Downsamples a 3D array to match the target shape using nearest-neighbor interpolation.
80
100
  Ensures that the resulting array has the exact target shape.
@@ -83,17 +103,17 @@ def downsample_to_exact_shape(array, target_shape):
83
103
  return zoom(array, zoom_factors, order=0)
84
104
 
85
105
 
86
- def segmentation_from_picks(
87
- radius,
88
- painting_segmentation_name,
89
- run,
90
- voxel_spacing,
91
- tomo_type,
92
- pickable_object,
93
- pick_set,
94
- user_id="paintedPicks",
95
- session_id="0",
96
- ):
106
+ def _create_segmentation_from_picks_legacy(
107
+ radius: float,
108
+ painting_segmentation_name: str,
109
+ run: "CopickRun",
110
+ voxel_spacing: float,
111
+ tomo_type: str,
112
+ pickable_object: "CopickObject",
113
+ pick_set: "CopickPicks",
114
+ user_id: str = "paintedPicks",
115
+ session_id: str = "0",
116
+ ) -> "CopickSegmentation":
97
117
  """
98
118
  Paints picks from a run into a multiscale segmentation array, representing them as spheres in 3D space.
99
119
 
@@ -185,3 +205,93 @@ def segmentation_from_picks(
185
205
  segmentation_group[level_name][:] = scaled_array
186
206
 
187
207
  return seg
208
+
209
+
210
def segmentation_from_picks(
    picks: "CopickPicks",
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    radius: float,
    voxel_spacing: float,
    tomo_type: str = "wbp",
) -> Optional[Tuple["CopickSegmentation", Dict[str, int]]]:
    """
    Convert CopickPicks to a segmentation by painting spheres.

    Args:
        picks: CopickPicks object to convert
        run: CopickRun object
        object_name: Name for the output segmentation
        session_id: Session ID for the output segmentation
        user_id: User ID for the output segmentation
        radius: Radius of the spheres in physical units
        voxel_spacing: Voxel spacing for the segmentation
        tomo_type: Type of tomogram to use as reference

    Returns:
        Tuple of (CopickSegmentation object, stats dict) or None if creation failed.
        Stats dict contains 'points_converted' and 'voxels_created'.
    """
    try:
        # Resolve the pickable object so label metadata is available.
        pickable_object = run.root.get_object(picks.pickable_object_name)
        if not pickable_object:
            logger.error(f"Object '{picks.pickable_object_name}' not found in config")
            return None

        if not picks.points:
            logger.error("No points found in pick set")
            return None

        # Delegate the actual painting to the legacy multiscale implementation.
        seg = _create_segmentation_from_picks_legacy(
            radius=radius,
            painting_segmentation_name=object_name,
            run=run,
            voxel_spacing=voxel_spacing,
            tomo_type=tomo_type,
            pickable_object=pickable_object,
            pick_set=picks,
            user_id=user_id,
            session_id=session_id,
        )

        # Voxel count is estimated analytically from the sphere volume —
        # overlapping or clipped spheres make this an approximation, not an
        # exact count.
        voxels_per_sphere = (4 / 3) * np.pi * (radius / voxel_spacing) ** 3
        stats = {
            "points_converted": len(picks.points),
            "voxels_created": int(len(picks.points) * voxels_per_sphere),
        }
        logger.info(f"Created segmentation from {stats['points_converted']} picks")
        return seg, stats

    except Exception as e:
        logger.error(f"Error creating segmentation: {e}")
        return None
278
+
279
+
280
# Worker used by the batch converter below; requires at least one pick
# point to paint.
_segmentation_from_picks_worker = create_batch_worker(segmentation_from_picks, "segmentation", "picks", min_points=1)


# Batch converter: applies segmentation_from_picks across many runs/pick
# sets with a shared progress description.
segmentation_from_picks_batch = create_batch_converter(
    segmentation_from_picks,
    "Converting picks to segmentations",
    "segmentation",
    "picks",
    min_points=1,
)

# Lazy batch converter for the new (deferred-execution) architecture.
segmentation_from_picks_lazy_batch = create_lazy_batch_converter(
    converter_func=segmentation_from_picks,
    task_description="Converting picks to segmentations",
)