copick-utils 0.6.0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (72)
  1. copick_utils/__init__.py +1 -0
  2. copick_utils/cli/__init__.py +33 -0
  3. copick_utils/cli/clipmesh.py +161 -0
  4. copick_utils/cli/clippicks.py +154 -0
  5. copick_utils/cli/clipseg.py +163 -0
  6. copick_utils/cli/conversion_commands.py +32 -0
  7. copick_utils/cli/enclosed.py +191 -0
  8. copick_utils/cli/filter_components.py +166 -0
  9. copick_utils/cli/fit_spline.py +191 -0
  10. copick_utils/cli/hull.py +138 -0
  11. copick_utils/cli/input_output_selection.py +76 -0
  12. copick_utils/cli/logical_commands.py +29 -0
  13. copick_utils/cli/mesh2picks.py +170 -0
  14. copick_utils/cli/mesh2seg.py +167 -0
  15. copick_utils/cli/meshop.py +262 -0
  16. copick_utils/cli/picks2ellipsoid.py +171 -0
  17. copick_utils/cli/picks2mesh.py +181 -0
  18. copick_utils/cli/picks2plane.py +156 -0
  19. copick_utils/cli/picks2seg.py +134 -0
  20. copick_utils/cli/picks2sphere.py +170 -0
  21. copick_utils/cli/picks2surface.py +164 -0
  22. copick_utils/cli/picksin.py +146 -0
  23. copick_utils/cli/picksout.py +148 -0
  24. copick_utils/cli/processing_commands.py +18 -0
  25. copick_utils/cli/seg2mesh.py +135 -0
  26. copick_utils/cli/seg2picks.py +128 -0
  27. copick_utils/cli/segop.py +248 -0
  28. copick_utils/cli/separate_components.py +155 -0
  29. copick_utils/cli/skeletonize.py +164 -0
  30. copick_utils/cli/util.py +580 -0
  31. copick_utils/cli/validbox.py +155 -0
  32. copick_utils/converters/__init__.py +35 -0
  33. copick_utils/converters/converter_common.py +543 -0
  34. copick_utils/converters/ellipsoid_from_picks.py +335 -0
  35. copick_utils/converters/lazy_converter.py +576 -0
  36. copick_utils/converters/mesh_from_picks.py +209 -0
  37. copick_utils/converters/mesh_from_segmentation.py +119 -0
  38. copick_utils/converters/picks_from_mesh.py +542 -0
  39. copick_utils/converters/picks_from_segmentation.py +168 -0
  40. copick_utils/converters/plane_from_picks.py +251 -0
  41. copick_utils/converters/segmentation_from_mesh.py +291 -0
  42. copick_utils/{segmentation → converters}/segmentation_from_picks.py +151 -15
  43. copick_utils/converters/sphere_from_picks.py +306 -0
  44. copick_utils/converters/surface_from_picks.py +337 -0
  45. copick_utils/features/skimage.py +33 -13
  46. copick_utils/io/readers.py +62 -59
  47. copick_utils/io/writers.py +9 -14
  48. copick_utils/logical/__init__.py +43 -0
  49. copick_utils/logical/distance_operations.py +604 -0
  50. copick_utils/logical/enclosed_operations.py +222 -0
  51. copick_utils/logical/mesh_operations.py +443 -0
  52. copick_utils/logical/point_operations.py +303 -0
  53. copick_utils/logical/segmentation_operations.py +399 -0
  54. copick_utils/pickers/grid_picker.py +5 -4
  55. copick_utils/process/__init__.py +47 -0
  56. copick_utils/process/connected_components.py +360 -0
  57. copick_utils/process/filter_components.py +306 -0
  58. copick_utils/process/hull.py +106 -0
  59. copick_utils/process/skeletonize.py +326 -0
  60. copick_utils/process/spline_fitting.py +648 -0
  61. copick_utils/process/validbox.py +333 -0
  62. copick_utils/util/__init__.py +6 -0
  63. copick_utils/util/config_models.py +614 -0
  64. {copick_utils-0.6.0.dist-info → copick_utils-1.0.0.dist-info}/METADATA +38 -12
  65. copick_utils-1.0.0.dist-info/RECORD +71 -0
  66. {copick_utils-0.6.0.dist-info → copick_utils-1.0.0.dist-info}/WHEEL +1 -1
  67. copick_utils-1.0.0.dist-info/entry_points.txt +29 -0
  68. copick_utils/__about__.py +0 -4
  69. copick_utils/segmentation/picks_from_segmentation.py +0 -67
  70. copick_utils-0.6.0.dist-info/RECORD +0 -15
  71. /copick_utils/{segmentation → io}/__init__.py +0 -0
  72. /copick_utils-0.6.0.dist-info/LICENSE.txt → /copick_utils-1.0.0.dist-info/licenses/LICENSE +0 -0
copick_utils/logical/enclosed_operations.py
@@ -0,0 +1,222 @@
+"""Operations for finding and absorbing enclosed segmentation components."""
+
+from typing import TYPE_CHECKING, Dict, Optional, Tuple
+
+import numpy as np
+from copick.util.log import get_logger
+from scipy.ndimage import binary_dilation, generate_binary_structure, label
+
+from copick_utils.converters.lazy_converter import create_lazy_batch_converter
+
+if TYPE_CHECKING:
+    from copick.models import CopickRun, CopickSegmentation
+
+logger = get_logger(__name__)
+
+
+def _remove_enclosed_components(
+    seg_inner: np.ndarray,
+    seg_outer: np.ndarray,
+    voxel_spacing: float,
+    margin: int = 1,
+    connectivity: str = "all",
+    min_size: Optional[float] = None,
+    max_size: Optional[float] = None,
+) -> Tuple[np.ndarray, int, list]:
+    """
+    Remove connected components from seg_inner that are fully surrounded by seg_outer.
+
+    Args:
+        seg_inner: Binary mask of inner segmentation (numpy array)
+        seg_outer: Binary mask of outer segmentation (numpy array)
+        voxel_spacing: Voxel spacing in angstroms
+        margin: Number of voxels to dilate for checking surrounding (default: 1)
+        connectivity: Connectivity for connected components (default: "all")
+            "face" = face connectivity (6-connected in 3D)
+            "face-edge" = face+edge connectivity (18-connected in 3D)
+            "all" = face+edge+corner connectivity (26-connected in 3D)
+        min_size: Minimum component volume in cubic angstroms (Å³) to consider (None = no minimum)
+        max_size: Maximum component volume in cubic angstroms (Å³) to consider (None = no maximum)
+
+    Returns:
+        Tuple of (seg_inner_cleaned, num_removed, component_info)
+        - seg_inner_cleaned: Cleaned inner segmentation with enclosed components removed
+        - num_removed: Number of components that were removed
+        - component_info: List of dicts with info about each component
+    """
+    # Create a copy to avoid modifying the original
+    seg_inner_cleaned = seg_inner.copy()
+
+    # Map connectivity string to numeric value
+    connectivity_map = {
+        "face": 1,
+        "face-edge": 2,
+        "all": 3,
+    }
+    connectivity_value = connectivity_map.get(connectivity, 3)
+
+    # Define connectivity structure
+    struct = generate_binary_structure(seg_inner.ndim, connectivity_value)
+
+    # Label connected components in inner segmentation
+    labeled_inner, num_components = label(seg_inner, structure=struct)
+
+    # Calculate voxel volume in cubic angstroms
+    voxel_volume = voxel_spacing**3
+
+    component_info = []
+    num_removed = 0
+
+    # Check each component
+    for component_id in range(1, num_components + 1):
+        # Extract this component
+        component_mask = labeled_inner == component_id
+        component_voxels = int(np.sum(component_mask))
+        component_volume = component_voxels * voxel_volume
+
+        # Apply size filtering (in cubic angstroms)
+        passes_size_filter = True
+        if min_size is not None and component_volume < min_size:
+            passes_size_filter = False
+        if max_size is not None and component_volume > max_size:
+            passes_size_filter = False
+
+        # Dilate the component
+        dilated_component = binary_dilation(component_mask, structure=struct, iterations=margin)
+
+        # Check if dilated component is fully contained in outer segmentation
+        is_surrounded = bool(np.all(dilated_component <= seg_outer))
+
+        # Decide whether to remove
+        should_remove = is_surrounded and passes_size_filter
+
+        # Store information
+        info = {
+            "component_id": component_id,
+            "voxels": component_voxels,
+            "volume": component_volume,
+            "is_surrounded": is_surrounded,
+            "passes_size_filter": passes_size_filter,
+            "removed": should_remove,
+        }
+        component_info.append(info)
+
+        # If surrounded and passes size filter, remove from inner segmentation
+        if should_remove:
+            seg_inner_cleaned = np.logical_and(seg_inner_cleaned, ~component_mask)
+            num_removed += 1
+
+    return seg_inner_cleaned.astype(np.uint8), num_removed, component_info
+
+
+def segmentation_enclosed(
+    segmentation1: "CopickSegmentation",
+    segmentation2: "CopickSegmentation",
+    run: "CopickRun",
+    object_name: str,
+    session_id: str,
+    user_id: str,
+    voxel_spacing: float,
+    is_multilabel: bool = False,
+    margin: int = 1,
+    connectivity: str = "all",
+    min_size: Optional[float] = None,
+    max_size: Optional[float] = None,
+    **kwargs,
+) -> Optional[Tuple["CopickSegmentation", Dict[str, int]]]:
+    """
+    Remove enclosed components from segmentation1 (inner) that are surrounded by segmentation2 (outer).
+
+    Args:
+        segmentation1: Inner CopickSegmentation object (segmentation to clean)
+        segmentation2: Outer CopickSegmentation object (reference/enclosing segmentation)
+        run: CopickRun object
+        object_name: Name for the output segmentation (cleaned version of segmentation1)
+        session_id: Session ID for the output segmentation
+        user_id: User ID for the output segmentation
+        voxel_spacing: Voxel spacing for the output segmentation in angstroms
+        is_multilabel: Whether the segmentation is multilabel
+        margin: Number of voxels to dilate for checking surrounding (default: 1)
+        connectivity: Connectivity for connected components (default: "all")
+            "face" = 6-connected, "face-edge" = 18-connected, "all" = 26-connected
+        min_size: Minimum component volume in cubic angstroms (Å³) to consider (None = no minimum)
+        max_size: Maximum component volume in cubic angstroms (Å³) to consider (None = no maximum)
+        **kwargs: Additional keyword arguments
+
+    Returns:
+        Tuple of (CopickSegmentation object, stats dict) or None if operation failed.
+        Stats dict contains 'voxels_kept', 'components_removed', and 'components_evaluated'.
+    """
+    try:
+        # Load segmentation arrays
+        seg1_array = segmentation1.numpy()
+        seg2_array = segmentation2.numpy()
+
+        if seg1_array is None or seg2_array is None:
+            logger.error("Could not load segmentation data")
+            return None
+
+        if seg1_array.size == 0 or seg2_array.size == 0:
+            logger.error("Empty segmentation data")
+            return None
+
+        # Ensure arrays have the same shape
+        if seg1_array.shape != seg2_array.shape:
+            logger.error(f"Segmentation arrays must have the same shape: {seg1_array.shape} vs {seg2_array.shape}")
+            return None
+
+        # Check that segmentations have compatible voxel spacing
+        if abs(segmentation1.voxel_size - segmentation2.voxel_size) > 1e-6:
+            logger.warning(
+                f"Segmentations have different voxel spacing: {segmentation1.voxel_size} vs {segmentation2.voxel_size}",
+            )
+
+        # Convert to boolean arrays
+        bool1 = seg1_array.astype(bool)
+        bool2 = seg2_array.astype(bool)
+
+        # Remove enclosed components from inner segmentation
+        result_array, num_removed, component_info = _remove_enclosed_components(
+            bool1,
+            bool2,
+            voxel_spacing=voxel_spacing,
+            margin=margin,
+            connectivity=connectivity,
+            min_size=min_size,
+            max_size=max_size,
+        )
+
+        # Create output segmentation (cleaned version of segmentation1)
+        output_seg = run.new_segmentation(
+            name=object_name,
+            user_id=user_id,
+            session_id=session_id,
+            is_multilabel=is_multilabel,
+            voxel_size=voxel_spacing,
+            exist_ok=True,
+        )
+
+        # Store the result
+        output_seg.from_numpy(result_array)
+
+        stats = {
+            "voxels_kept": int(np.sum(result_array)),
+            "components_removed": num_removed,
+            "components_evaluated": len(component_info),
+        }
+        logger.info(
+            f"Removed {stats['components_removed']}/{stats['components_evaluated']} enclosed components "
+            f"({stats['voxels_kept']} voxels remaining)",
+        )
+        return output_seg, stats
+
+    except Exception as e:
+        logger.error(f"Error performing segmentation enclosed operation: {e}")
+        return None
+
+
+# Lazy batch converter for new architecture
+segmentation_enclosed_lazy_batch = create_lazy_batch_converter(
+    converter_func=segmentation_enclosed,
+    task_description="Removing enclosed segmentation components",
+)
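
A minimal sketch (not part of the package) of how the enclosed-component check added above behaves on synthetic data. Only numpy and the new `copick_utils.logical.enclosed_operations` module are used; the toy geometry and the 10 Å voxel spacing are assumptions for illustration.

```python
import numpy as np

from copick_utils.logical.enclosed_operations import _remove_enclosed_components

# Outer segmentation: a solid 10x10x10 block inside a 20x20x20 volume.
outer = np.zeros((20, 20, 20), dtype=np.uint8)
outer[5:15, 5:15, 5:15] = 1

# Inner segmentation: one small blob inside the block (enclosed after a
# 1-voxel dilation) and one blob in empty space (not enclosed, so it is kept).
inner = np.zeros_like(outer)
inner[9:11, 9:11, 9:11] = 1
inner[0:2, 0:2, 0:2] = 1

cleaned, num_removed, info = _remove_enclosed_components(
    inner.astype(bool),
    outer.astype(bool),
    voxel_spacing=10.0,  # angstroms per voxel, so each voxel is 1000 Å³
    margin=1,
    connectivity="all",
)

print(num_removed)         # expected: 1 (the enclosed blob)
print(int(cleaned.sum()))  # expected: 8 (only the outside blob remains)
```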
copick_utils/logical/mesh_operations.py
@@ -0,0 +1,443 @@
+"""Mesh operations (union, intersection, difference, exclusion, concatenate)."""
+
+from typing import TYPE_CHECKING, Dict, List, Optional, Tuple
+
+import trimesh as tm
+from copick.util.log import get_logger
+
+from copick_utils.converters.converter_common import (
+    create_batch_converter,
+    create_batch_worker,
+    store_mesh_with_stats,
+)
+from copick_utils.converters.lazy_converter import create_lazy_batch_converter
+
+if TYPE_CHECKING:
+    from copick.models import CopickMesh, CopickRun
+
+logger = get_logger(__name__)
+
+
+def _perform_mesh_boolean_operation(mesh1: tm.Trimesh, mesh2: tm.Trimesh, operation: str) -> Optional[tm.Trimesh]:
+    """
+    Perform boolean operation between two meshes.
+
+    Args:
+        mesh1: First mesh
+        mesh2: Second mesh
+        operation: Type of boolean operation ('union', 'difference', 'intersection', 'exclusion', 'concatenate')
+
+    Returns:
+        Result mesh or None if operation failed
+    """
+    try:
+        if operation == "union":
+            result = mesh1.union(mesh2)
+        elif operation == "difference":
+            result = mesh1.difference(mesh2)
+        elif operation == "intersection":
+            result = mesh1.intersection(mesh2)
+        elif operation == "exclusion":
+            # Exclusion = (A union B) - (A intersection B)
+            union_mesh = mesh1.union(mesh2)
+            intersection_mesh = mesh1.intersection(mesh2)
+            result = union_mesh.difference(intersection_mesh)
+        elif operation == "concatenate":
+            # Simple concatenation without boolean operations
+            result = tm.util.concatenate([mesh1, mesh2])
+        else:
+            raise ValueError(f"Unknown operation: {operation}")
+
+        # Handle the case where result might be a Scene or empty
+        if isinstance(result, tm.Scene):
+            if len(result.geometry) == 0:
+                logger.warning(f"{operation.capitalize()} operation resulted in empty geometry")
+                return None
+            # Concatenate all geometries in the scene
+            result = tm.util.concatenate(list(result.geometry.values()))
+        elif isinstance(result, tm.Trimesh):
+            if result.vertices.shape[0] == 0:
+                logger.warning(f"{operation.capitalize()} operation resulted in empty mesh")
+                return None
+        else:
+            logger.warning(f"{operation.capitalize()} operation returned unexpected type: {type(result)}")
+            return None
+
+        return result
+
+    except Exception as e:
+        logger.error(f"{operation.capitalize()} operation failed: {e}")
+        return None
+
+
+def mesh_boolean_operation(
+    mesh1: "CopickMesh",
+    mesh2: "CopickMesh",
+    run: "CopickRun",
+    object_name: str,
+    session_id: str,
+    user_id: str,
+    operation: str,
+    **kwargs,
+) -> Optional[Tuple["CopickMesh", Dict[str, int]]]:
+    """
+    Perform boolean operation between two CopickMesh objects.
+
+    Args:
+        mesh1: First CopickMesh object
+        mesh2: Second CopickMesh object
+        run: CopickRun object
+        object_name: Name for the output mesh
+        session_id: Session ID for the output mesh
+        user_id: User ID for the output mesh
+        operation: Type of operation ('union', 'difference', 'intersection', 'exclusion', 'concatenate')
+        **kwargs: Additional keyword arguments
+
+    Returns:
+        Tuple of (CopickMesh object, stats dict) or None if operation failed.
+        Stats dict contains 'vertices_created' and 'faces_created'.
+    """
+    try:
+        # Get trimesh objects
+        trimesh1 = mesh1.mesh
+        trimesh2 = mesh2.mesh
+
+        if trimesh1 is None or trimesh2 is None:
+            logger.error("Could not load mesh data")
+            return None
+
+        # Ensure we have proper Trimesh objects
+        if isinstance(trimesh1, tm.Scene):
+            if len(trimesh1.geometry) == 0:
+                logger.error("First mesh is empty")
+                return None
+            trimesh1 = tm.util.concatenate(list(trimesh1.geometry.values()))
+
+        if isinstance(trimesh2, tm.Scene):
+            if len(trimesh2.geometry) == 0:
+                logger.error("Second mesh is empty")
+                return None
+            trimesh2 = tm.util.concatenate(list(trimesh2.geometry.values()))
+
+        # Perform boolean operation
+        result_mesh = _perform_mesh_boolean_operation(trimesh1, trimesh2, operation)
+
+        if result_mesh is None:
+            logger.error(f"Boolean {operation} operation failed")
+            return None
+
+        # Store the result
+        copick_mesh, stats = store_mesh_with_stats(
+            run=run,
+            mesh=result_mesh,
+            object_name=object_name,
+            session_id=session_id,
+            user_id=user_id,
+            shape_name=f"{operation} result",
+        )
+
+        logger.info(f"Created {operation} mesh with {stats['vertices_created']} vertices")
+        return copick_mesh, stats
+
+    except Exception as e:
+        logger.error(f"Error performing {operation}: {e}")
+        return None
+
+
+# Individual operation functions
+def mesh_union(
+    mesh1: "CopickMesh",
+    mesh2: "CopickMesh",
+    run: "CopickRun",
+    object_name: str,
+    session_id: str,
+    user_id: str,
+    **kwargs,
+) -> Optional[Tuple["CopickMesh", Dict[str, int]]]:
+    """Union of two meshes."""
+    return mesh_boolean_operation(mesh1, mesh2, run, object_name, session_id, user_id, "union", **kwargs)
+
+
+def mesh_difference(
+    mesh1: "CopickMesh",
+    mesh2: "CopickMesh",
+    run: "CopickRun",
+    object_name: str,
+    session_id: str,
+    user_id: str,
+    **kwargs,
+) -> Optional[Tuple["CopickMesh", Dict[str, int]]]:
+    """Difference of two meshes (mesh1 - mesh2)."""
+    return mesh_boolean_operation(mesh1, mesh2, run, object_name, session_id, user_id, "difference", **kwargs)
+
+
+def mesh_intersection(
+    mesh1: "CopickMesh",
+    mesh2: "CopickMesh",
+    run: "CopickRun",
+    object_name: str,
+    session_id: str,
+    user_id: str,
+    **kwargs,
+) -> Optional[Tuple["CopickMesh", Dict[str, int]]]:
+    """Intersection of two meshes."""
+    return mesh_boolean_operation(mesh1, mesh2, run, object_name, session_id, user_id, "intersection", **kwargs)
+
+
+def mesh_exclusion(
+    mesh1: "CopickMesh",
+    mesh2: "CopickMesh",
+    run: "CopickRun",
+    object_name: str,
+    session_id: str,
+    user_id: str,
+    **kwargs,
+) -> Optional[Tuple["CopickMesh", Dict[str, int]]]:
+    """Exclusive or (XOR) of two meshes."""
+    return mesh_boolean_operation(mesh1, mesh2, run, object_name, session_id, user_id, "exclusion", **kwargs)
+
+
+def mesh_concatenate(
+    mesh1: "CopickMesh",
+    mesh2: "CopickMesh",
+    run: "CopickRun",
+    object_name: str,
+    session_id: str,
+    user_id: str,
+    **kwargs,
+) -> Optional[Tuple["CopickMesh", Dict[str, int]]]:
+    """Concatenate two meshes without boolean operations."""
+    return mesh_boolean_operation(mesh1, mesh2, run, object_name, session_id, user_id, "concatenate", **kwargs)
+
+
+def mesh_multi_union(
+    meshes: List["CopickMesh"],
+    run: "CopickRun",
+    object_name: str,
+    session_id: str,
+    user_id: str,
+    **kwargs,
+) -> Optional[Tuple["CopickMesh", Dict[str, int]]]:
+    """
+    Perform N-way boolean union of multiple meshes.
+
+    Args:
+        meshes: List of CopickMesh objects (N≥2)
+        run: CopickRun object
+        object_name: Name for output mesh
+        session_id: Session ID for output
+        user_id: User ID for output
+        **kwargs: Additional arguments
+
+    Returns:
+        Tuple of (CopickMesh, stats) or None if failed
+    """
+    try:
+        if len(meshes) < 2:
+            logger.error("Need at least 2 meshes for N-way union")
+            return None
+
+        # Load all meshes and convert to Trimesh objects
+        trimeshes = []
+        for i, mesh in enumerate(meshes):
+            trimesh_obj = mesh.mesh
+            if trimesh_obj is None:
+                logger.error(f"Could not load mesh {i+1} (session: {mesh.session_id})")
+                return None
+
+            # Handle Scene objects
+            if isinstance(trimesh_obj, tm.Scene):
+                if len(trimesh_obj.geometry) == 0:
+                    logger.error(f"Mesh {i+1} is empty")
+                    return None
+                trimesh_obj = tm.util.concatenate(list(trimesh_obj.geometry.values()))
+
+            trimeshes.append(trimesh_obj)
+
+        # Perform cumulative boolean union
+        result = trimeshes[0]
+        for i, trimesh_obj in enumerate(trimeshes[1:], start=2):
+            try:
+                result = result.union(trimesh_obj)
+                # Handle Scene result
+                if isinstance(result, tm.Scene):
+                    if len(result.geometry) == 0:
+                        logger.error(f"Union failed at mesh {i}: empty result")
+                        return None
+                    result = tm.util.concatenate(list(result.geometry.values()))
+            except Exception as e:
+                logger.error(f"Union failed at mesh {i}: {e}")
+                return None
+
+        # Store the result
+        copick_mesh, stats = store_mesh_with_stats(
+            run=run,
+            mesh=result,
+            object_name=object_name,
+            session_id=session_id,
+            user_id=user_id,
+            shape_name=f"{len(meshes)}-way union result",
+        )
+
+        logger.info(f"Created {len(meshes)}-way union with {stats['vertices_created']} vertices")
+        return copick_mesh, stats
+
+    except Exception as e:
+        logger.error(f"Error in N-way mesh union: {e}")
+        return None
+
+
+def mesh_multi_concatenate(
+    meshes: List["CopickMesh"],
+    run: "CopickRun",
+    object_name: str,
+    session_id: str,
+    user_id: str,
+    **kwargs,
+) -> Optional[Tuple["CopickMesh", Dict[str, int]]]:
+    """
+    Concatenate N meshes without boolean operations.
+
+    Args:
+        meshes: List of CopickMesh objects (N≥2)
+        run: CopickRun object
+        object_name: Name for output mesh
+        session_id: Session ID for output
+        user_id: User ID for output
+        **kwargs: Additional arguments
+
+    Returns:
+        Tuple of (CopickMesh, stats) or None if failed
+    """
+    try:
+        if len(meshes) < 2:
+            logger.error("Need at least 2 meshes for N-way concatenation")
+            return None
+
+        # Load all meshes
+        trimeshes = []
+        for i, mesh in enumerate(meshes):
+            trimesh_obj = mesh.mesh
+            if trimesh_obj is None:
+                logger.error(f"Could not load mesh {i+1} (session: {mesh.session_id})")
+                return None
+
+            # Handle Scene objects
+            if isinstance(trimesh_obj, tm.Scene):
+                if len(trimesh_obj.geometry) == 0:
+                    logger.error(f"Mesh {i+1} is empty")
+                    return None
+                trimesh_obj = tm.util.concatenate(list(trimesh_obj.geometry.values()))
+
+            trimeshes.append(trimesh_obj)
+
+        # Concatenate all meshes
+        result = tm.util.concatenate(trimeshes)
+
+        # Store the result
+        copick_mesh, stats = store_mesh_with_stats(
+            run=run,
+            mesh=result,
+            object_name=object_name,
+            session_id=session_id,
+            user_id=user_id,
+            shape_name=f"{len(meshes)}-mesh concatenation",
+        )
+
+        logger.info(f"Concatenated {len(meshes)} meshes: {stats['vertices_created']} vertices")
+        return copick_mesh, stats
+
+    except Exception as e:
+        logger.error(f"Error in N-way concatenation: {e}")
+        return None
+
+
+# Create batch workers for each operation
+_mesh_union_worker = create_batch_worker(mesh_union, "mesh", "mesh", min_points=0)
+_mesh_difference_worker = create_batch_worker(mesh_difference, "mesh", "mesh", min_points=0)
+_mesh_intersection_worker = create_batch_worker(mesh_intersection, "mesh", "mesh", min_points=0)
+_mesh_exclusion_worker = create_batch_worker(mesh_exclusion, "mesh", "mesh", min_points=0)
+_mesh_concatenate_worker = create_batch_worker(mesh_concatenate, "mesh", "mesh", min_points=0)
+
+# Create batch converters
+mesh_union_batch = create_batch_converter(
+    mesh_union,
+    "Computing mesh unions",
+    "mesh",
+    "mesh",
+    min_points=0,
+    dual_input=True,
+)
+
+mesh_difference_batch = create_batch_converter(
+    mesh_difference,
+    "Computing mesh differences",
+    "mesh",
+    "mesh",
+    min_points=0,
+    dual_input=True,
+)
+
+mesh_intersection_batch = create_batch_converter(
+    mesh_intersection,
+    "Computing mesh intersections",
+    "mesh",
+    "mesh",
+    min_points=0,
+    dual_input=True,
+)
+
+mesh_exclusion_batch = create_batch_converter(
+    mesh_exclusion,
+    "Computing mesh exclusions",
+    "mesh",
+    "mesh",
+    min_points=0,
+    dual_input=True,
+)
+
+mesh_concatenate_batch = create_batch_converter(
+    mesh_concatenate,
+    "Computing mesh concatenations",
+    "mesh",
+    "mesh",
+    min_points=0,
+    dual_input=True,
+)
+
+# Lazy batch converters for new architecture
+mesh_union_lazy_batch = create_lazy_batch_converter(
+    converter_func=mesh_union,
+    task_description="Computing mesh unions",
+)
+
+mesh_difference_lazy_batch = create_lazy_batch_converter(
+    converter_func=mesh_difference,
+    task_description="Computing mesh differences",
+)
+
+mesh_intersection_lazy_batch = create_lazy_batch_converter(
+    converter_func=mesh_intersection,
+    task_description="Computing mesh intersections",
+)
+
+mesh_exclusion_lazy_batch = create_lazy_batch_converter(
+    converter_func=mesh_exclusion,
+    task_description="Computing mesh exclusions",
+)
+
+mesh_concatenate_lazy_batch = create_lazy_batch_converter(
+    converter_func=mesh_concatenate,
+    task_description="Computing mesh concatenations",
+)
+
+# Lazy batch converters for N-way operations
+mesh_multi_union_lazy_batch = create_lazy_batch_converter(
+    converter_func=mesh_multi_union,
+    task_description="Computing N-way mesh unions",
+)
+
+mesh_multi_concatenate_lazy_batch = create_lazy_batch_converter(
+    converter_func=mesh_multi_concatenate,
+    task_description="Computing N-way mesh concatenations",
+)
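
A minimal sketch (not part of the package) exercising the low-level helper from the new `copick_utils.logical.mesh_operations` module on trimesh primitives, so no copick project is needed. The box geometry is an assumption for illustration; note that trimesh boolean operations ('union', 'difference', 'intersection', 'exclusion') require a boolean backend such as manifold3d to be installed, while 'concatenate' works without one.

```python
import trimesh as tm

from copick_utils.logical.mesh_operations import _perform_mesh_boolean_operation

# Two overlapping 2x2x2 boxes.
box_a = tm.creation.box(extents=(2.0, 2.0, 2.0))
box_b = tm.creation.box(extents=(2.0, 2.0, 2.0))
box_b.apply_translation((1.0, 0.0, 0.0))

# Plain concatenation: both meshes stacked into one, no intersections resolved.
merged = _perform_mesh_boolean_operation(box_a, box_b, "concatenate")
print(merged.vertices.shape[0])  # 16 vertices (8 per box)

# Boolean union: returns None (with a logged error) if no boolean backend is available.
union = _perform_mesh_boolean_operation(box_a, box_b, "union")
if union is not None:
    print(union.is_watertight, union.volume)
```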