copick-utils 0.6.1__py3-none-any.whl → 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. copick_utils/__init__.py +1 -1
  2. copick_utils/cli/__init__.py +33 -0
  3. copick_utils/cli/clipmesh.py +161 -0
  4. copick_utils/cli/clippicks.py +154 -0
  5. copick_utils/cli/clipseg.py +163 -0
  6. copick_utils/cli/conversion_commands.py +32 -0
  7. copick_utils/cli/enclosed.py +191 -0
  8. copick_utils/cli/filter_components.py +166 -0
  9. copick_utils/cli/fit_spline.py +191 -0
  10. copick_utils/cli/hull.py +138 -0
  11. copick_utils/cli/input_output_selection.py +76 -0
  12. copick_utils/cli/logical_commands.py +29 -0
  13. copick_utils/cli/mesh2picks.py +170 -0
  14. copick_utils/cli/mesh2seg.py +167 -0
  15. copick_utils/cli/meshop.py +262 -0
  16. copick_utils/cli/picks2ellipsoid.py +171 -0
  17. copick_utils/cli/picks2mesh.py +181 -0
  18. copick_utils/cli/picks2plane.py +156 -0
  19. copick_utils/cli/picks2seg.py +134 -0
  20. copick_utils/cli/picks2sphere.py +170 -0
  21. copick_utils/cli/picks2surface.py +164 -0
  22. copick_utils/cli/picksin.py +146 -0
  23. copick_utils/cli/picksout.py +148 -0
  24. copick_utils/cli/processing_commands.py +18 -0
  25. copick_utils/cli/seg2mesh.py +135 -0
  26. copick_utils/cli/seg2picks.py +128 -0
  27. copick_utils/cli/segop.py +248 -0
  28. copick_utils/cli/separate_components.py +155 -0
  29. copick_utils/cli/skeletonize.py +164 -0
  30. copick_utils/cli/util.py +580 -0
  31. copick_utils/cli/validbox.py +155 -0
  32. copick_utils/converters/__init__.py +35 -0
  33. copick_utils/converters/converter_common.py +543 -0
  34. copick_utils/converters/ellipsoid_from_picks.py +335 -0
  35. copick_utils/converters/lazy_converter.py +576 -0
  36. copick_utils/converters/mesh_from_picks.py +209 -0
  37. copick_utils/converters/mesh_from_segmentation.py +119 -0
  38. copick_utils/converters/picks_from_mesh.py +542 -0
  39. copick_utils/converters/picks_from_segmentation.py +168 -0
  40. copick_utils/converters/plane_from_picks.py +251 -0
  41. copick_utils/converters/segmentation_from_mesh.py +291 -0
  42. copick_utils/{segmentation → converters}/segmentation_from_picks.py +123 -13
  43. copick_utils/converters/sphere_from_picks.py +306 -0
  44. copick_utils/converters/surface_from_picks.py +337 -0
  45. copick_utils/logical/__init__.py +43 -0
  46. copick_utils/logical/distance_operations.py +604 -0
  47. copick_utils/logical/enclosed_operations.py +222 -0
  48. copick_utils/logical/mesh_operations.py +443 -0
  49. copick_utils/logical/point_operations.py +303 -0
  50. copick_utils/logical/segmentation_operations.py +399 -0
  51. copick_utils/process/__init__.py +47 -0
  52. copick_utils/process/connected_components.py +360 -0
  53. copick_utils/process/filter_components.py +306 -0
  54. copick_utils/process/hull.py +106 -0
  55. copick_utils/process/skeletonize.py +326 -0
  56. copick_utils/process/spline_fitting.py +648 -0
  57. copick_utils/process/validbox.py +333 -0
  58. copick_utils/util/__init__.py +6 -0
  59. copick_utils/util/config_models.py +614 -0
  60. {copick_utils-0.6.1.dist-info → copick_utils-1.0.1.dist-info}/METADATA +15 -2
  61. copick_utils-1.0.1.dist-info/RECORD +71 -0
  62. {copick_utils-0.6.1.dist-info → copick_utils-1.0.1.dist-info}/WHEEL +1 -1
  63. copick_utils-1.0.1.dist-info/entry_points.txt +29 -0
  64. copick_utils/segmentation/picks_from_segmentation.py +0 -81
  65. copick_utils-0.6.1.dist-info/RECORD +0 -14
  66. /copick_utils/{segmentation → io}/__init__.py +0 -0
  67. {copick_utils-0.6.1.dist-info → copick_utils-1.0.1.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,306 @@
1
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
2
+
3
+ import numpy as np
4
+ import trimesh as tm
5
+ from copick.util.log import get_logger
6
+ from scipy.optimize import minimize
7
+
8
+ from copick_utils.converters.converter_common import (
9
+ cluster,
10
+ create_batch_converter,
11
+ create_batch_worker,
12
+ store_mesh_with_stats,
13
+ validate_points,
14
+ )
15
+ from copick_utils.converters.lazy_converter import create_lazy_batch_converter
16
+
17
+ if TYPE_CHECKING:
18
+ from copick.models import CopickMesh, CopickRun
19
+
20
+ logger = get_logger(__name__)
21
+
22
+
23
def fit_sphere_to_points(points: np.ndarray) -> Tuple[np.ndarray, float]:
    """Fit a sphere to a set of 3D points using least squares.

    Args:
        points: Nx3 array of points.

    Returns:
        Tuple of (center, radius).

    Raises:
        ValueError: If fewer than 4 points are provided (a sphere has four
            degrees of freedom, so at least four points are required).
    """
    if len(points) < 4:
        raise ValueError("Need at least 4 points to fit a sphere")

    def sphere_residuals(params, points):
        """Signed distance of each point from the candidate sphere surface."""
        cx, cy, cz, r = params
        center = np.array([cx, cy, cz])
        distances = np.linalg.norm(points - center, axis=1)
        return distances - r

    # Initial guess: center at centroid, radius as average distance to centroid
    centroid = np.mean(points, axis=0)
    distances = np.linalg.norm(points - centroid, axis=1)
    initial_radius = np.mean(distances)

    initial_params = [centroid[0], centroid[1], centroid[2], initial_radius]

    # Minimize the sum of squared geometric residuals
    result = minimize(lambda params: np.sum(sphere_residuals(params, points) ** 2), initial_params, method="L-BFGS-B")

    if result.success:
        cx, cy, cz, r = result.x
        center = np.array([cx, cy, cz])
        radius = abs(r)  # Ensure positive radius
        return center, radius

    # Fallback: the optimizer failed, so return the centroid-based initial
    # guess. Reuse initial_radius instead of recomputing the identical mean
    # distance (the original code redundantly recomputed it).
    return centroid, initial_radius
61
+
62
+
63
def deduplicate_spheres(
    spheres: List[Tuple[np.ndarray, float]],
    min_distance: Optional[float] = None,
) -> List[Tuple[np.ndarray, float]]:
    """Merge spheres that are too close to each other.

    Args:
        spheres: List of (center, radius) tuples.
        min_distance: Minimum distance between sphere centers. If None, half
            the average radius of all input spheres is used.

    Returns:
        List of deduplicated (center, radius) tuples.
    """
    # Fix: the parameter was annotated `float = None` (invalid implicit
    # Optional per PEP 484); it is now explicitly Optional[float].
    if len(spheres) <= 1:
        return spheres

    if min_distance is None:
        # Default threshold derived from the average radius of the inputs
        avg_radius = np.mean([radius for _, radius in spheres])
        min_distance = avg_radius * 0.5

    deduplicated = []
    used = set()

    for i, (center1, radius1) in enumerate(spheres):
        if i in used:
            continue

        # Collect all not-yet-consumed spheres within min_distance of this one
        close_spheres = [(center1, radius1)]
        used.add(i)

        for j, (center2, radius2) in enumerate(spheres):
            if j in used or i == j:
                continue

            distance = np.linalg.norm(center1 - center2)
            if distance <= min_distance:
                close_spheres.append((center2, radius2))
                used.add(j)

        if len(close_spheres) == 1:
            # No neighbors within threshold; keep the sphere unchanged
            deduplicated.append((center1, radius1))
        else:
            # Merge the group of overlapping spheres into a single sphere
            centers = np.array([center for center, _ in close_spheres])
            radii = np.array([radius for _, radius in close_spheres])

            # Weight by volume so larger spheres dominate the merged center
            volumes = (4 / 3) * np.pi * radii**3
            weights = volumes / np.sum(volumes)
            merged_center = np.average(centers, axis=0, weights=weights)

            # Volume-weighted average for the merged radius as well
            merged_radius = np.average(radii, weights=weights)

            deduplicated.append((merged_center, merged_radius))
            logger.info(f"Merged {len(close_spheres)} overlapping spheres into one")

    return deduplicated
124
+
125
+
126
def create_sphere_mesh(center: np.ndarray, radius: float, subdivisions: int = 2) -> tm.Trimesh:
    """Create a sphere mesh with given center and radius.

    Args:
        center: 3D center point.
        radius: Sphere radius.
        subdivisions: Number of subdivisions for sphere resolution; higher
            values yield a finer icosphere tessellation.

    Returns:
        Trimesh sphere object.
    """
    # Build an icosphere already at the requested radius, then translate it
    # to the requested center (no separate scaling step is needed).
    sphere = tm.creation.icosphere(subdivisions=subdivisions, radius=radius)
    sphere.apply_translation(center)
    return sphere
141
+
142
+
143
def sphere_from_picks(
    points: np.ndarray,
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    use_clustering: bool = False,
    clustering_method: str = "dbscan",
    clustering_params: Optional[Dict[str, Any]] = None,
    subdivisions: int = 2,
    all_clusters: bool = False,
    deduplicate_spheres_flag: bool = True,
    min_sphere_distance: Optional[float] = None,
    individual_meshes: bool = False,
    session_id_template: Optional[str] = None,
) -> Optional[Tuple["CopickMesh", Dict[str, int]]]:
    """Create sphere mesh(es) from pick points.

    Args:
        points: Nx3 array of pick positions.
        run: Copick run object.
        object_name: Name of the mesh object.
        session_id: Session ID for the mesh.
        user_id: User ID for the mesh.
        use_clustering: Whether to cluster points first.
        clustering_method: Clustering method ('dbscan', 'kmeans').
        clustering_params: Parameters for clustering.
            e.g.
            - {'eps': 5.0, 'min_samples': 3} for DBSCAN
            - {'n_clusters': 3} for KMeans
        subdivisions: Number of subdivisions for sphere resolution.
        all_clusters: If True and clustering is used, use all clusters. If False, use only largest cluster.
        deduplicate_spheres_flag: Whether to merge overlapping spheres.
        min_sphere_distance: Minimum distance between sphere centers for deduplication.
        individual_meshes: If True, create separate mesh objects for each sphere.
        session_id_template: Template for individual mesh session IDs.
            Supports the fields {base_session_id} and {instance_id}.

    Returns:
        Tuple of (CopickMesh object, stats dict) or None if creation failed.
        Stats dict contains 'vertices_created' and 'faces_created' totals.
    """
    # Reject inputs with fewer than 4 points (sphere fitting minimum)
    if not validate_points(points, 4, "sphere"):
        return None

    if clustering_params is None:
        clustering_params = {}

    # Handle clustering workflow with special sphere logic
    if use_clustering:
        point_clusters = cluster(points, clustering_method, 4, **clustering_params)

        if not point_clusters:
            logger.warning("No valid clusters found")
            return None

        logger.info(f"Found {len(point_clusters)} clusters")

        if all_clusters and len(point_clusters) > 1:
            # Fit one sphere per cluster; clusters that fail to fit are skipped
            sphere_params = []
            for i, cluster_points in enumerate(point_clusters):
                try:
                    center, radius = fit_sphere_to_points(cluster_points)
                    sphere_params.append((center, radius))
                    logger.info(f"Cluster {i}: sphere at {center} with radius {radius:.2f}")
                except Exception as e:
                    logger.critical(f"Failed to fit sphere to cluster {i}: {e}")
                    continue

            if not sphere_params:
                logger.warning("No valid spheres created from clusters")
                return None

            # Deduplicate overlapping spheres if requested
            if deduplicate_spheres_flag:
                final_spheres = deduplicate_spheres(sphere_params, min_sphere_distance)
            else:
                final_spheres = sphere_params

            if individual_meshes:
                # Create separate mesh objects for each sphere.
                # NOTE(review): this branch stores each mesh itself and
                # returns below, bypassing the shared store_mesh_with_stats
                # path at the end of the function.
                created_meshes = []
                total_vertices = 0
                total_faces = 0

                for i, (center, radius) in enumerate(final_spheres):
                    sphere_mesh = create_sphere_mesh(center, radius, subdivisions)

                    # Generate session ID using template if provided
                    if session_id_template:
                        sphere_session_id = session_id_template.format(
                            base_session_id=session_id,
                            instance_id=i,
                        )
                    else:
                        # Default: zero-padded per-sphere suffix, e.g. "sess-003"
                        sphere_session_id = f"{session_id}-{i:03d}"

                    try:
                        copick_mesh = run.new_mesh(object_name, sphere_session_id, user_id, exist_ok=True)
                        copick_mesh.mesh = sphere_mesh
                        copick_mesh.store()
                        created_meshes.append(copick_mesh)
                        total_vertices += len(sphere_mesh.vertices)
                        total_faces += len(sphere_mesh.faces)
                        logger.info(f"Created individual sphere mesh {i} with {len(sphere_mesh.vertices)} vertices")
                    except Exception as e:
                        logger.error(f"Failed to create mesh {i}: {e}")
                        continue

                # Return the first mesh and total stats (aggregated over all
                # successfully stored spheres)
                if created_meshes:
                    stats = {"vertices_created": total_vertices, "faces_created": total_faces}
                    return created_meshes[0], stats
                else:
                    return None
            else:
                # Create meshes from final spheres and combine them
                all_meshes = []
                for center, radius in final_spheres:
                    sphere_mesh = create_sphere_mesh(center, radius, subdivisions)
                    all_meshes.append(sphere_mesh)

                # Combine all meshes into a single Trimesh
                combined_mesh = tm.util.concatenate(all_meshes)
        else:
            # Use largest cluster only.
            # NOTE: the comprehension variable 'cluster' is scoped to the
            # comprehension and does not shadow the imported cluster() here.
            cluster_sizes = [len(cluster) for cluster in point_clusters]
            largest_cluster_idx = np.argmax(cluster_sizes)
            points_to_use = point_clusters[largest_cluster_idx]
            logger.info(f"Using largest cluster with {len(points_to_use)} points")

            center, radius = fit_sphere_to_points(points_to_use)
            combined_mesh = create_sphere_mesh(center, radius, subdivisions)
    else:
        # Fit single sphere to all points (no clustering)
        center, radius = fit_sphere_to_points(points)
        combined_mesh = create_sphere_mesh(center, radius, subdivisions)
        logger.info(f"Fitted sphere at {center} with radius {radius:.2f}")

    # Store the combined mesh and return (mesh, stats); failures are logged
    # and reported as None rather than raised
    try:
        return store_mesh_with_stats(run, combined_mesh, object_name, session_id, user_id, "sphere")
    except Exception as e:
        logger.critical(f"Error creating mesh: {e}")
        return None
288
+
289
+
290
# Create worker function using common infrastructure.
# Presumably invoked per-run by the batch machinery in converter_common —
# TODO confirm against create_batch_worker.
_sphere_from_picks_worker = create_batch_worker(sphere_from_picks, "sphere", min_points=4)


# Create batch converter using common infrastructure (eager processing with
# the given progress description; enforces the 4-point sphere minimum).
sphere_from_picks_batch = create_batch_converter(
    sphere_from_picks,
    "Converting picks to sphere meshes",
    "sphere",
    min_points=4,
)

# Lazy batch converter for new architecture
sphere_from_picks_lazy_batch = create_lazy_batch_converter(
    converter_func=sphere_from_picks,
    task_description="Converting picks to sphere meshes",
)
@@ -0,0 +1,337 @@
1
+ from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple
2
+
3
+ import numpy as np
4
+ import trimesh as tm
5
+ from copick.util.log import get_logger
6
+ from scipy.interpolate import Rbf, griddata
7
+ from scipy.spatial import Delaunay
8
+ from sklearn.decomposition import PCA
9
+
10
+ from copick_utils.converters.converter_common import (
11
+ cluster,
12
+ create_batch_converter,
13
+ create_batch_worker,
14
+ store_mesh_with_stats,
15
+ validate_points,
16
+ )
17
+ from copick_utils.converters.lazy_converter import create_lazy_batch_converter
18
+
19
+ if TYPE_CHECKING:
20
+ from copick.models import CopickMesh, CopickRun
21
+
22
+ logger = get_logger(__name__)
23
+
24
+
25
def fit_2d_surface_to_points(
    points: np.ndarray,
    method: str = "delaunay",
    grid_resolution: int = 50,
) -> tm.Trimesh:
    """Fit a 2D surface to 3D points using different interpolation methods.

    Args:
        points: Nx3 array of points.
        method: Surface fitting method ('delaunay', 'rbf', 'grid').
        grid_resolution: Resolution for grid-based methods.

    Returns:
        Trimesh surface object.

    Raises:
        ValueError: If fewer than 3 points are given or *method* is unknown.
    """
    if len(points) < 3:
        raise ValueError("Need at least 3 points to fit a surface")

    # Dispatch to the requested fitting strategy via guard returns
    if method == "delaunay":
        return delaunay_surface(points)
    if method == "rbf":
        return rbf_surface(points, grid_resolution)
    if method == "grid":
        return grid_surface(points, grid_resolution)
    raise ValueError(f"Unknown surface method: {method}")
51
+
52
+
53
def delaunay_surface(points: np.ndarray) -> tm.Trimesh:
    """Create a surface using Delaunay triangulation.

    Args:
        points: Nx3 array of points.

    Returns:
        Trimesh surface object.
    """
    # Shift the cloud to its centroid and find the best-fit plane via PCA
    centroid = np.mean(points, axis=0)
    shifted = points - centroid

    pca = PCA(n_components=3)
    pca.fit(shifted)

    # Coordinates in the plane spanned by the first two principal axes
    plane_coords = pca.transform(shifted)[:, :2]

    # Triangulate in 2D; vertex order is preserved, so the simplices index
    # directly into the original 3D points
    triangulation = Delaunay(plane_coords)
    return tm.Trimesh(vertices=points, faces=triangulation.simplices)
77
+
78
+
79
def rbf_surface(points: np.ndarray, grid_resolution: int) -> tm.Trimesh:
    """Create a surface using RBF (Radial Basis Function) interpolation.

    Args:
        points: Nx3 array of points.
        grid_resolution: Resolution of the output grid.

    Returns:
        Trimesh surface object.
    """
    # Find the dominant plane using PCA
    center = np.mean(points, axis=0)
    centered_points = points - center

    pca = PCA(n_components=3)
    pca.fit(centered_points)

    # Project once and reuse: the first two components span the plane, the
    # third component is the height above it (the original code called
    # pca.transform twice on the same data).
    transformed = pca.transform(centered_points)
    projected_2d = transformed[:, :2]
    heights = transformed[:, 2]

    # Create grid for interpolation over the plane's bounding box
    x_min, x_max = projected_2d[:, 0].min(), projected_2d[:, 0].max()
    y_min, y_max = projected_2d[:, 1].min(), projected_2d[:, 1].max()

    xi = np.linspace(x_min, x_max, grid_resolution)
    yi = np.linspace(y_min, y_max, grid_resolution)
    xi_grid, yi_grid = np.meshgrid(xi, yi)

    # Thin-plate-spline RBF interpolation of the heights over the grid
    rbf = Rbf(projected_2d[:, 0], projected_2d[:, 1], heights, function="thin_plate")
    zi_grid = rbf(xi_grid, yi_grid)

    # Convert grid back to 3D coordinates.
    # (Removed a no-op `grid_points_3d.reshape(...)` whose result was
    # discarded in the original implementation.)
    grid_points_2d = np.column_stack([xi_grid.flatten(), yi_grid.flatten(), zi_grid.flatten()])
    grid_points_3d = pca.inverse_transform(grid_points_2d) + center

    # Triangulate the regular grid: two triangles per cell
    faces = []
    for i in range(grid_resolution - 1):
        for j in range(grid_resolution - 1):
            v1 = i * grid_resolution + j
            v2 = i * grid_resolution + (j + 1)
            v3 = (i + 1) * grid_resolution + j
            v4 = (i + 1) * grid_resolution + (j + 1)

            faces.extend([[v1, v2, v3], [v2, v4, v3]])

    return tm.Trimesh(vertices=grid_points_3d, faces=faces)
131
+
132
+
133
def grid_surface(points: np.ndarray, grid_resolution: int) -> tm.Trimesh:
    """Create a surface using grid-based interpolation.

    Args:
        points: Nx3 array of points.
        grid_resolution: Resolution of the output grid.

    Returns:
        Trimesh surface object.
    """
    # Axis-aligned bounding box of the cloud
    lo = np.min(points, axis=0)
    hi = np.max(points, axis=0)

    # The axis with the smallest extent is treated as the "height" axis;
    # the remaining two axes span the interpolation grid
    extents = hi - lo
    height_dim = np.argmin(extents)
    other_dims = [d for d in range(3) if d != height_dim]

    # Regular grid over the two planar axes
    x_coords = np.linspace(lo[other_dims[0]], hi[other_dims[0]], grid_resolution)
    y_coords = np.linspace(lo[other_dims[1]], hi[other_dims[1]], grid_resolution)
    xi, yi = np.meshgrid(x_coords, y_coords)

    # Linearly interpolate heights; grid cells outside the data's convex
    # hull fall back to the mean height
    zi = griddata(
        points[:, other_dims],
        points[:, height_dim],
        (xi, yi),
        method="linear",
        fill_value=np.mean(points[:, height_dim]),
    )

    # Assemble 3D vertices in row-major grid order
    vertices = np.zeros((grid_resolution * grid_resolution, 3))
    vertices[:, other_dims[0]] = xi.flatten()
    vertices[:, other_dims[1]] = yi.flatten()
    vertices[:, height_dim] = zi.flatten()

    # Two triangles per grid cell (same vertex ordering as a fan split)
    faces = []
    for i in range(grid_resolution - 1):
        row = i * grid_resolution
        nxt = (i + 1) * grid_resolution
        for j in range(grid_resolution - 1):
            a = row + j
            b = row + j + 1
            c = nxt + j
            d = nxt + j + 1
            faces.extend([[a, b, c], [b, d, c]])

    return tm.Trimesh(vertices=vertices, faces=faces)
187
+
188
+
189
def surface_from_picks(
    points: np.ndarray,
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    surface_method: str = "delaunay",
    grid_resolution: int = 50,
    use_clustering: bool = False,
    clustering_method: str = "dbscan",
    clustering_params: Optional[Dict[str, Any]] = None,
    all_clusters: bool = True,
    individual_meshes: bool = False,
    session_id_template: Optional[str] = None,
) -> Optional[Tuple["CopickMesh", Dict[str, int]]]:
    """Create surface mesh(es) from pick points.

    Args:
        points: Nx3 array of pick positions.
        run: Copick run object.
        object_name: Name of the mesh object.
        session_id: Session ID for the mesh.
        user_id: User ID for the mesh.
        surface_method: Surface fitting method ('delaunay', 'rbf', 'grid').
        grid_resolution: Resolution for grid-based methods.
        use_clustering: Whether to cluster points first.
        clustering_method: Clustering method ('dbscan', 'kmeans').
        clustering_params: Parameters for clustering.
            e.g.
            - {'eps': 5.0, 'min_samples': 3} for DBSCAN
            - {'n_clusters': 3} for KMeans
        all_clusters: If True, use all clusters; if False, use only the largest cluster.
        individual_meshes: If True, create separate mesh objects for each surface.
        session_id_template: Template for individual mesh session IDs.
            Supports the fields {base_session_id} and {instance_id}.

    Returns:
        Tuple of (CopickMesh object, stats dict) or None if creation failed.
        Stats dict contains 'vertices_created' and 'faces_created' totals.
    """
    # Reject inputs with fewer than 3 points (surface fitting minimum)
    if not validate_points(points, 3, "surface"):
        return None

    if clustering_params is None:
        clustering_params = {}

    # Define surface creation function (closes over method and resolution)
    def create_surface_from_points(cluster_points):
        return fit_2d_surface_to_points(cluster_points, surface_method, grid_resolution)

    # Handle clustering workflow with special surface logic
    if use_clustering:
        point_clusters = cluster(
            points,
            clustering_method,
            min_points_per_cluster=3,  # Surfaces need at least 3 points
            **clustering_params,
        )

        if not point_clusters:
            logger.warning("No valid clusters found")
            return None

        logger.info(f"Found {len(point_clusters)} clusters")

        if all_clusters and len(point_clusters) > 1:
            if individual_meshes:
                # Create separate mesh objects for each surface.
                # NOTE(review): this branch stores each mesh itself and
                # returns below, bypassing the shared store_mesh_with_stats
                # path at the end of the function.
                created_meshes = []
                total_vertices = 0
                total_faces = 0

                for i, cluster_points in enumerate(point_clusters):
                    try:
                        surface_mesh = create_surface_from_points(cluster_points)

                        # Generate session ID using template if provided
                        if session_id_template:
                            surface_session_id = session_id_template.format(
                                base_session_id=session_id,
                                instance_id=i,
                            )
                        else:
                            # Default: zero-padded per-surface suffix
                            surface_session_id = f"{session_id}-{i:03d}"

                        copick_mesh = run.new_mesh(object_name, surface_session_id, user_id, exist_ok=True)
                        copick_mesh.mesh = surface_mesh
                        copick_mesh.store()
                        created_meshes.append(copick_mesh)
                        total_vertices += len(surface_mesh.vertices)
                        total_faces += len(surface_mesh.faces)
                        logger.info(
                            f"Created individual surface mesh {i} with {len(surface_mesh.vertices)} vertices",
                        )
                    except Exception as e:
                        logger.error(f"Failed to create mesh {i}: {e}")
                        continue

                # Return the first mesh and total stats (aggregated over all
                # successfully stored surfaces)
                if created_meshes:
                    stats = {"vertices_created": total_vertices, "faces_created": total_faces}
                    return created_meshes[0], stats
                else:
                    return None
            else:
                # Create meshes from all clusters and combine them
                all_meshes = []
                for cluster_points in point_clusters:
                    surface_mesh = create_surface_from_points(cluster_points)
                    all_meshes.append(surface_mesh)

                # Combine all meshes into a single Trimesh
                combined_mesh = tm.util.concatenate(all_meshes)
        else:
            # Use largest cluster only.
            # NOTE: the comprehension variable 'cluster' is scoped to the
            # comprehension and does not shadow the imported cluster() here.
            cluster_sizes = [len(cluster) for cluster in point_clusters]
            largest_cluster_idx = np.argmax(cluster_sizes)
            points_to_use = point_clusters[largest_cluster_idx]
            logger.info(f"Using largest cluster with {len(points_to_use)} points")

            combined_mesh = create_surface_from_points(points_to_use)
    else:
        # Use all points without clustering
        combined_mesh = create_surface_from_points(points)

    # Store the combined mesh and return (mesh, stats); failures are logged
    # and reported as None rather than raised
    try:
        return store_mesh_with_stats(run, combined_mesh, object_name, session_id, user_id, "surface")
    except Exception as e:
        logger.critical(f"Error creating mesh: {e}")
        return None
319
+
320
+
321
# Create worker function using common infrastructure.
# Presumably invoked per-run by the batch machinery in converter_common —
# TODO confirm against create_batch_worker.
_surface_from_picks_worker = create_batch_worker(surface_from_picks, "surface", min_points=3)


# Create batch converter using common infrastructure (eager processing with
# the given progress description; enforces the 3-point surface minimum).
surface_from_picks_batch = create_batch_converter(
    surface_from_picks,
    "Converting picks to surface meshes",
    "surface",
    min_points=3,
)

# Lazy batch converter for new architecture
surface_from_picks_lazy_batch = create_lazy_batch_converter(
    converter_func=surface_from_picks,
    task_description="Converting picks to surface meshes",
)
@@ -0,0 +1,43 @@
1
+ """Logical operations for copick objects (meshes, segmentations, picks)."""
2
+
3
+ from copick_utils.logical.distance_operations import (
4
+ limit_mesh_by_distance,
5
+ limit_picks_by_distance,
6
+ limit_segmentation_by_distance,
7
+ )
8
+ from copick_utils.logical.mesh_operations import (
9
+ mesh_difference,
10
+ mesh_exclusion,
11
+ mesh_intersection,
12
+ mesh_union,
13
+ )
14
+ from copick_utils.logical.point_operations import (
15
+ picks_exclusion_by_mesh,
16
+ picks_inclusion_by_mesh,
17
+ )
18
+ from copick_utils.logical.segmentation_operations import (
19
+ segmentation_difference,
20
+ segmentation_exclusion,
21
+ segmentation_intersection,
22
+ segmentation_union,
23
+ )
24
+
25
# Public API of the logical subpackage; every name listed here is
# re-exported from one of the submodules imported above.
__all__ = [
    # Mesh boolean operations
    "mesh_union",
    "mesh_difference",
    "mesh_exclusion",
    "mesh_intersection",
    # Segmentation boolean operations
    "segmentation_union",
    "segmentation_difference",
    "segmentation_exclusion",
    "segmentation_intersection",
    # Distance-based limiting operations
    "limit_mesh_by_distance",
    "limit_segmentation_by_distance",
    "limit_picks_by_distance",
    # Point inclusion/exclusion operations
    "picks_inclusion_by_mesh",
    "picks_exclusion_by_mesh",
]