copick-utils 0.6.1__py3-none-any.whl → 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- copick_utils/__init__.py +1 -1
- copick_utils/cli/__init__.py +33 -0
- copick_utils/cli/clipmesh.py +161 -0
- copick_utils/cli/clippicks.py +154 -0
- copick_utils/cli/clipseg.py +163 -0
- copick_utils/cli/conversion_commands.py +32 -0
- copick_utils/cli/enclosed.py +191 -0
- copick_utils/cli/filter_components.py +166 -0
- copick_utils/cli/fit_spline.py +191 -0
- copick_utils/cli/hull.py +138 -0
- copick_utils/cli/input_output_selection.py +76 -0
- copick_utils/cli/logical_commands.py +29 -0
- copick_utils/cli/mesh2picks.py +170 -0
- copick_utils/cli/mesh2seg.py +167 -0
- copick_utils/cli/meshop.py +262 -0
- copick_utils/cli/picks2ellipsoid.py +171 -0
- copick_utils/cli/picks2mesh.py +181 -0
- copick_utils/cli/picks2plane.py +156 -0
- copick_utils/cli/picks2seg.py +134 -0
- copick_utils/cli/picks2sphere.py +170 -0
- copick_utils/cli/picks2surface.py +164 -0
- copick_utils/cli/picksin.py +146 -0
- copick_utils/cli/picksout.py +148 -0
- copick_utils/cli/processing_commands.py +18 -0
- copick_utils/cli/seg2mesh.py +135 -0
- copick_utils/cli/seg2picks.py +128 -0
- copick_utils/cli/segop.py +248 -0
- copick_utils/cli/separate_components.py +155 -0
- copick_utils/cli/skeletonize.py +164 -0
- copick_utils/cli/util.py +580 -0
- copick_utils/cli/validbox.py +155 -0
- copick_utils/converters/__init__.py +35 -0
- copick_utils/converters/converter_common.py +543 -0
- copick_utils/converters/ellipsoid_from_picks.py +335 -0
- copick_utils/converters/lazy_converter.py +576 -0
- copick_utils/converters/mesh_from_picks.py +209 -0
- copick_utils/converters/mesh_from_segmentation.py +119 -0
- copick_utils/converters/picks_from_mesh.py +542 -0
- copick_utils/converters/picks_from_segmentation.py +168 -0
- copick_utils/converters/plane_from_picks.py +251 -0
- copick_utils/converters/segmentation_from_mesh.py +291 -0
- copick_utils/{segmentation → converters}/segmentation_from_picks.py +123 -13
- copick_utils/converters/sphere_from_picks.py +306 -0
- copick_utils/converters/surface_from_picks.py +337 -0
- copick_utils/logical/__init__.py +43 -0
- copick_utils/logical/distance_operations.py +604 -0
- copick_utils/logical/enclosed_operations.py +222 -0
- copick_utils/logical/mesh_operations.py +443 -0
- copick_utils/logical/point_operations.py +303 -0
- copick_utils/logical/segmentation_operations.py +399 -0
- copick_utils/process/__init__.py +47 -0
- copick_utils/process/connected_components.py +360 -0
- copick_utils/process/filter_components.py +306 -0
- copick_utils/process/hull.py +106 -0
- copick_utils/process/skeletonize.py +326 -0
- copick_utils/process/spline_fitting.py +648 -0
- copick_utils/process/validbox.py +333 -0
- copick_utils/util/__init__.py +6 -0
- copick_utils/util/config_models.py +614 -0
- {copick_utils-0.6.1.dist-info → copick_utils-1.0.1.dist-info}/METADATA +15 -2
- copick_utils-1.0.1.dist-info/RECORD +71 -0
- {copick_utils-0.6.1.dist-info → copick_utils-1.0.1.dist-info}/WHEEL +1 -1
- copick_utils-1.0.1.dist-info/entry_points.txt +29 -0
- copick_utils/segmentation/picks_from_segmentation.py +0 -81
- copick_utils-0.6.1.dist-info/RECORD +0 -14
- /copick_utils/{segmentation → io}/__init__.py +0 -0
- {copick_utils-0.6.1.dist-info → copick_utils-1.0.1.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,303 @@
|
|
|
1
|
+
"""Point inclusion/exclusion operations for picks relative to meshes and segmentations."""
|
|
2
|
+
|
|
3
|
+
from typing import TYPE_CHECKING, Dict, Optional, Tuple
|
|
4
|
+
|
|
5
|
+
import numpy as np
|
|
6
|
+
import trimesh as tm
|
|
7
|
+
from copick.util.log import get_logger
|
|
8
|
+
|
|
9
|
+
from copick_utils.converters.converter_common import (
|
|
10
|
+
create_batch_converter,
|
|
11
|
+
create_batch_worker,
|
|
12
|
+
)
|
|
13
|
+
from copick_utils.converters.lazy_converter import create_lazy_batch_converter
|
|
14
|
+
|
|
15
|
+
if TYPE_CHECKING:
|
|
16
|
+
from copick.models import CopickMesh, CopickPicks, CopickRun, CopickSegmentation
|
|
17
|
+
|
|
18
|
+
logger = get_logger(__name__)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _check_points_in_mesh(points: np.ndarray, mesh: tm.Trimesh) -> np.ndarray:
|
|
22
|
+
"""
|
|
23
|
+
Check which points are inside a watertight mesh.
|
|
24
|
+
|
|
25
|
+
Args:
|
|
26
|
+
points: Array of points to check (N, 3)
|
|
27
|
+
mesh: Watertight trimesh object
|
|
28
|
+
|
|
29
|
+
Returns:
|
|
30
|
+
Boolean array indicating which points are inside the mesh
|
|
31
|
+
"""
|
|
32
|
+
try:
|
|
33
|
+
# Check if mesh is watertight
|
|
34
|
+
if not mesh.is_watertight:
|
|
35
|
+
logger.warning("Mesh is not watertight, using bounding box approximation")
|
|
36
|
+
# Fallback: use bounding box
|
|
37
|
+
bounds = mesh.bounds
|
|
38
|
+
inside = np.all((points >= bounds[0]) & (points <= bounds[1]), axis=1)
|
|
39
|
+
return inside
|
|
40
|
+
|
|
41
|
+
# Use contains method for watertight meshes
|
|
42
|
+
inside = mesh.contains(points)
|
|
43
|
+
return inside
|
|
44
|
+
|
|
45
|
+
except Exception as e:
|
|
46
|
+
logger.warning(f"Error checking point containment: {e}")
|
|
47
|
+
# Fallback: use bounding box
|
|
48
|
+
bounds = mesh.bounds
|
|
49
|
+
inside = np.all((points >= bounds[0]) & (points <= bounds[1]), axis=1)
|
|
50
|
+
return inside
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def _check_points_in_segmentation(
|
|
54
|
+
points: np.ndarray,
|
|
55
|
+
segmentation_array: np.ndarray,
|
|
56
|
+
voxel_spacing: float,
|
|
57
|
+
) -> np.ndarray:
|
|
58
|
+
"""
|
|
59
|
+
Check which points are inside a segmentation volume.
|
|
60
|
+
|
|
61
|
+
Args:
|
|
62
|
+
points: Array of points to check (N, 3) in physical coordinates
|
|
63
|
+
segmentation_array: Binary segmentation array
|
|
64
|
+
voxel_spacing: Spacing between voxels
|
|
65
|
+
|
|
66
|
+
Returns:
|
|
67
|
+
Boolean array indicating which points are inside the segmentation
|
|
68
|
+
"""
|
|
69
|
+
# Convert points to voxel coordinates
|
|
70
|
+
voxel_coords = np.round(points / voxel_spacing).astype(int)
|
|
71
|
+
|
|
72
|
+
# Check bounds
|
|
73
|
+
valid_bounds = (
|
|
74
|
+
(voxel_coords[:, 0] >= 0)
|
|
75
|
+
& (voxel_coords[:, 0] < segmentation_array.shape[2])
|
|
76
|
+
& (voxel_coords[:, 1] >= 0)
|
|
77
|
+
& (voxel_coords[:, 1] < segmentation_array.shape[1])
|
|
78
|
+
& (voxel_coords[:, 2] >= 0)
|
|
79
|
+
& (voxel_coords[:, 2] < segmentation_array.shape[0])
|
|
80
|
+
)
|
|
81
|
+
|
|
82
|
+
inside = np.zeros(len(points), dtype=bool)
|
|
83
|
+
|
|
84
|
+
# Check only points within bounds
|
|
85
|
+
valid_coords = voxel_coords[valid_bounds]
|
|
86
|
+
if len(valid_coords) > 0:
|
|
87
|
+
# Check if voxels are non-zero (inside segmentation)
|
|
88
|
+
voxel_values = segmentation_array[valid_coords[:, 2], valid_coords[:, 1], valid_coords[:, 0]]
|
|
89
|
+
inside[valid_bounds] = voxel_values > 0
|
|
90
|
+
|
|
91
|
+
return inside
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def picks_inclusion_by_mesh(
    picks: "CopickPicks",
    run: "CopickRun",
    pick_object_name: str,
    pick_session_id: str,
    pick_user_id: str,
    reference_mesh: Optional["CopickMesh"] = None,
    reference_segmentation: Optional["CopickSegmentation"] = None,
) -> Optional[Tuple["CopickPicks", Dict[str, int]]]:
    """
    Filter picks to include only those inside a reference mesh or segmentation.

    Args:
        picks: CopickPicks to filter
        run: CopickRun object
        pick_object_name: Name for the output picks
        pick_session_id: Session ID for the output picks
        pick_user_id: User ID for the output picks
        reference_mesh: Reference CopickMesh (either this or reference_segmentation must be provided)
        reference_segmentation: Reference CopickSegmentation

    Returns:
        Tuple of (CopickPicks object, stats dict) or None if operation failed.
        Stats dict contains 'points_created'.
    """
    try:
        if reference_mesh is None and reference_segmentation is None:
            raise ValueError("Either reference_mesh or reference_segmentation must be provided")

        # Load pick data
        points, transforms = picks.numpy()
        if points is None or len(points) == 0:
            logger.error("Could not load pick data")
            return None

        pick_positions = points[:, :3]  # Use only x, y, z coordinates

        # Check which points are inside the reference.
        # NOTE(review): when both references are given, the mesh takes
        # precedence and the segmentation is ignored.
        if reference_mesh is not None:
            ref_mesh = reference_mesh.mesh
            if ref_mesh is None:
                logger.error("Could not load reference mesh data")
                return None

            # A Scene can hold multiple geometries; merge them into one
            # Trimesh so the containment test sees a single surface.
            if isinstance(ref_mesh, tm.Scene):
                if len(ref_mesh.geometry) == 0:
                    logger.error("Reference mesh is empty")
                    return None
                ref_mesh = tm.util.concatenate(list(ref_mesh.geometry.values()))

            inside_mask = _check_points_in_mesh(pick_positions, ref_mesh)

        else:  # reference_segmentation is not None
            ref_seg_array = reference_segmentation.numpy()
            if ref_seg_array is None or ref_seg_array.size == 0:
                logger.error("Could not load reference segmentation data")
                return None

            inside_mask = _check_points_in_segmentation(
                pick_positions,
                ref_seg_array,
                reference_segmentation.voxel_size,
            )

        # Nothing inside the reference: treated as a failed operation (None),
        # not as an empty result.
        if not np.any(inside_mask):
            logger.warning("No picks found inside reference volume")
            return None

        # Filter picks to include only those inside
        included_points = points[inside_mask]
        included_transforms = transforms[inside_mask] if transforms is not None else None

        # Create output picks and persist them.
        output_picks = run.new_picks(pick_object_name, pick_session_id, pick_user_id, exist_ok=True)
        output_picks.from_numpy(positions=included_points, transforms=included_transforms)
        output_picks.store()

        stats = {"points_created": len(included_points)}
        logger.info(f"Included {stats['points_created']} picks inside reference volume")
        return output_picks, stats

    except Exception as e:
        # Broad catch by design: batch drivers expect None on any failure.
        logger.error(f"Error filtering picks by inclusion: {e}")
        return None
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def picks_exclusion_by_mesh(
    picks: "CopickPicks",
    run: "CopickRun",
    pick_object_name: str,
    pick_session_id: str,
    pick_user_id: str,
    reference_mesh: Optional["CopickMesh"] = None,
    reference_segmentation: Optional["CopickSegmentation"] = None,
) -> Optional[Tuple["CopickPicks", Dict[str, int]]]:
    """
    Filter picks to exclude those inside a reference mesh or segmentation.

    Args:
        picks: CopickPicks to filter
        run: CopickRun object
        pick_object_name: Name for the output picks
        pick_session_id: Session ID for the output picks
        pick_user_id: User ID for the output picks
        reference_mesh: Reference CopickMesh (either this or reference_segmentation must be provided)
        reference_segmentation: Reference CopickSegmentation

    Returns:
        Tuple of (CopickPicks object, stats dict) or None if operation failed.
        Stats dict contains 'points_created'.
    """
    try:
        if reference_mesh is None and reference_segmentation is None:
            raise ValueError("Either reference_mesh or reference_segmentation must be provided")

        # Load pick data
        points, transforms = picks.numpy()
        if points is None or len(points) == 0:
            logger.error("Could not load pick data")
            return None

        pick_positions = points[:, :3]  # Use only x, y, z coordinates

        # Check which points are inside the reference.
        # NOTE(review): when both references are given, the mesh takes
        # precedence and the segmentation is ignored.
        if reference_mesh is not None:
            ref_mesh = reference_mesh.mesh
            if ref_mesh is None:
                logger.error("Could not load reference mesh data")
                return None

            # A Scene can hold multiple geometries; merge them into one
            # Trimesh so the containment test sees a single surface.
            if isinstance(ref_mesh, tm.Scene):
                if len(ref_mesh.geometry) == 0:
                    logger.error("Reference mesh is empty")
                    return None
                ref_mesh = tm.util.concatenate(list(ref_mesh.geometry.values()))

            inside_mask = _check_points_in_mesh(pick_positions, ref_mesh)

        else:  # reference_segmentation is not None
            ref_seg_array = reference_segmentation.numpy()
            if ref_seg_array is None or ref_seg_array.size == 0:
                logger.error("Could not load reference segmentation data")
                return None

            inside_mask = _check_points_in_segmentation(
                pick_positions,
                ref_seg_array,
                reference_segmentation.voxel_size,
            )

        # Invert mask to exclude points inside
        outside_mask = ~inside_mask

        # Everything inside the reference: treated as a failed operation
        # (None), not as an empty result.
        if not np.any(outside_mask):
            logger.warning("No picks found outside reference volume")
            return None

        # Filter picks to exclude those inside
        excluded_points = points[outside_mask]
        excluded_transforms = transforms[outside_mask] if transforms is not None else None

        # Create output picks and persist them.
        output_picks = run.new_picks(pick_object_name, pick_session_id, pick_user_id, exist_ok=True)
        output_picks.from_numpy(positions=excluded_points, transforms=excluded_transforms)
        output_picks.store()

        stats = {"points_created": len(excluded_points)}
        logger.info(
            f"Excluded {len(points) - stats['points_created']} picks inside reference volume, kept {stats['points_created']} picks",
        )
        return output_picks, stats

    except Exception as e:
        # Broad catch by design: batch drivers expect None on any failure.
        logger.error(f"Error filtering picks by exclusion: {e}")
        return None
|
|
271
|
+
|
|
272
|
+
|
|
273
|
+
# Create batch workers
# Workers wrap the single-run converters so the batch machinery can invoke
# them per run; min_points=1 skips runs with no picks to filter.
_picks_inclusion_by_mesh_worker = create_batch_worker(picks_inclusion_by_mesh, "picks", "picks", min_points=1)
_picks_exclusion_by_mesh_worker = create_batch_worker(picks_exclusion_by_mesh, "picks", "picks", min_points=1)

# Create batch converters
# Eager converters: input type "picks", output type "picks".
picks_inclusion_by_mesh_batch = create_batch_converter(
    picks_inclusion_by_mesh,
    "Filtering picks by inclusion",
    "picks",
    "picks",
    min_points=1,
)

picks_exclusion_by_mesh_batch = create_batch_converter(
    picks_exclusion_by_mesh,
    "Filtering picks by exclusion",
    "picks",
    "picks",
    min_points=1,
)

# Lazy batch converters for new architecture
# These defer loading/conversion until the task actually runs.
picks_inclusion_by_mesh_lazy_batch = create_lazy_batch_converter(
    converter_func=picks_inclusion_by_mesh,
    task_description="Filtering picks by inclusion",
)

picks_exclusion_by_mesh_lazy_batch = create_lazy_batch_converter(
    converter_func=picks_exclusion_by_mesh,
    task_description="Filtering picks by exclusion",
)
|
|
@@ -0,0 +1,399 @@
|
|
|
1
|
+
"""Boolean operations on segmentations (union, intersection, difference, exclusion)."""
|
|
2
|
+
|
|
3
|
+
from typing import TYPE_CHECKING, Dict, List, Optional, Tuple
|
|
4
|
+
|
|
5
|
+
import numpy as np
|
|
6
|
+
from copick.util.log import get_logger
|
|
7
|
+
|
|
8
|
+
from copick_utils.converters.converter_common import (
|
|
9
|
+
create_batch_converter,
|
|
10
|
+
create_batch_worker,
|
|
11
|
+
)
|
|
12
|
+
from copick_utils.converters.lazy_converter import create_lazy_batch_converter
|
|
13
|
+
|
|
14
|
+
if TYPE_CHECKING:
|
|
15
|
+
from copick.models import CopickRun, CopickSegmentation
|
|
16
|
+
|
|
17
|
+
logger = get_logger(__name__)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _perform_segmentation_boolean_operation(
|
|
21
|
+
seg1_array: np.ndarray,
|
|
22
|
+
seg2_array: np.ndarray,
|
|
23
|
+
operation: str,
|
|
24
|
+
) -> np.ndarray:
|
|
25
|
+
"""
|
|
26
|
+
Perform boolean operation between two segmentation arrays.
|
|
27
|
+
|
|
28
|
+
Args:
|
|
29
|
+
seg1_array: First segmentation array (should be binary: 0 or 1)
|
|
30
|
+
seg2_array: Second segmentation array (should be binary: 0 or 1)
|
|
31
|
+
operation: Type of boolean operation ('union', 'difference', 'intersection', 'exclusion')
|
|
32
|
+
|
|
33
|
+
Returns:
|
|
34
|
+
Result segmentation array
|
|
35
|
+
"""
|
|
36
|
+
# Ensure arrays have the same shape
|
|
37
|
+
if seg1_array.shape != seg2_array.shape:
|
|
38
|
+
raise ValueError(f"Segmentation arrays must have the same shape: {seg1_array.shape} vs {seg2_array.shape}")
|
|
39
|
+
|
|
40
|
+
# Convert to boolean arrays
|
|
41
|
+
bool1 = seg1_array.astype(bool)
|
|
42
|
+
bool2 = seg2_array.astype(bool)
|
|
43
|
+
|
|
44
|
+
if operation == "union":
|
|
45
|
+
result = np.logical_or(bool1, bool2)
|
|
46
|
+
elif operation == "difference":
|
|
47
|
+
result = np.logical_and(bool1, np.logical_not(bool2))
|
|
48
|
+
elif operation == "intersection":
|
|
49
|
+
result = np.logical_and(bool1, bool2)
|
|
50
|
+
elif operation == "exclusion":
|
|
51
|
+
# Exclusion = (A or B) and not (A and B)
|
|
52
|
+
result = np.logical_and(np.logical_or(bool1, bool2), np.logical_not(np.logical_and(bool1, bool2)))
|
|
53
|
+
else:
|
|
54
|
+
raise ValueError(f"Unknown boolean operation: {operation}")
|
|
55
|
+
|
|
56
|
+
return result.astype(np.uint8)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def segmentation_boolean_operation(
    segmentation1: "CopickSegmentation",
    segmentation2: "CopickSegmentation",
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    operation: str,
    voxel_spacing: float,
    is_multilabel: bool = False,
    **kwargs,
) -> Optional[Tuple["CopickSegmentation", Dict[str, int]]]:
    """
    Perform boolean operation between two CopickSegmentation objects.

    Args:
        segmentation1: First CopickSegmentation object
        segmentation2: Second CopickSegmentation object
        run: CopickRun object
        object_name: Name for the output segmentation
        session_id: Session ID for the output segmentation
        user_id: User ID for the output segmentation
        operation: Type of boolean operation ('union', 'difference', 'intersection', 'exclusion')
        voxel_spacing: Voxel spacing for the output segmentation
        is_multilabel: Whether the segmentation is multilabel
        **kwargs: Additional keyword arguments (accepted for batch-driver
            compatibility; not used here)

    Returns:
        Tuple of (CopickSegmentation object, stats dict) or None if operation failed.
        Stats dict contains 'voxels_created'.
    """
    try:
        # Load segmentation arrays
        seg1_array = segmentation1.numpy()
        seg2_array = segmentation2.numpy()

        if seg1_array is None or seg2_array is None:
            logger.error("Could not load segmentation data")
            return None

        if seg1_array.size == 0 or seg2_array.size == 0:
            logger.error("Empty segmentation data")
            return None

        # Check that segmentations have compatible voxel spacing.
        # Mismatched spacing is only warned about, not rejected — the
        # voxel-wise operation proceeds on the raw arrays regardless.
        if abs(segmentation1.voxel_size - segmentation2.voxel_size) > 1e-6:
            logger.warning(
                f"Segmentations have different voxel spacing: {segmentation1.voxel_size} vs {segmentation2.voxel_size}",
            )

        # Perform boolean operation (raises ValueError on shape mismatch or
        # unknown operation; caught by the broad handler below).
        result_array = _perform_segmentation_boolean_operation(seg1_array, seg2_array, operation)

        # Create output segmentation
        output_seg = run.new_segmentation(
            name=object_name,
            user_id=user_id,
            session_id=session_id,
            is_multilabel=is_multilabel,
            voxel_size=voxel_spacing,
            exist_ok=True,
        )

        # Store the result
        # NOTE(review): assumes from_numpy persists the data — there is no
        # explicit store() call here, unlike the picks operations.
        output_seg.from_numpy(result_array)

        stats = {"voxels_created": int(np.sum(result_array))}
        logger.info(f"Created {operation} segmentation with {stats['voxels_created']} voxels")
        return output_seg, stats

    except Exception as e:
        # Broad catch by design: batch drivers expect None on any failure.
        logger.error(f"Error performing segmentation {operation}: {e}")
        return None
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
# Individual operation functions
|
|
135
|
+
def segmentation_union(
    segmentation1: "CopickSegmentation",
    segmentation2: "CopickSegmentation",
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    voxel_spacing: float,
    is_multilabel: bool = False,
    **kwargs,
) -> Optional[Tuple["CopickSegmentation", Dict[str, int]]]:
    """Compute the voxel-wise union (logical OR) of two segmentations."""
    # Thin wrapper: delegate to the generic boolean operation with
    # operation fixed to 'union'.
    return segmentation_boolean_operation(
        segmentation1=segmentation1,
        segmentation2=segmentation2,
        run=run,
        object_name=object_name,
        session_id=session_id,
        user_id=user_id,
        operation="union",
        voxel_spacing=voxel_spacing,
        is_multilabel=is_multilabel,
        **kwargs,
    )
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
def segmentation_difference(
    segmentation1: "CopickSegmentation",
    segmentation2: "CopickSegmentation",
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    voxel_spacing: float,
    is_multilabel: bool = False,
    **kwargs,
) -> Optional[Tuple["CopickSegmentation", Dict[str, int]]]:
    """Compute the voxel-wise set difference of two segmentations (seg1 - seg2)."""
    # Thin wrapper: delegate to the generic boolean operation with
    # operation fixed to 'difference'.
    return segmentation_boolean_operation(
        segmentation1=segmentation1,
        segmentation2=segmentation2,
        run=run,
        object_name=object_name,
        session_id=session_id,
        user_id=user_id,
        operation="difference",
        voxel_spacing=voxel_spacing,
        is_multilabel=is_multilabel,
        **kwargs,
    )
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def segmentation_intersection(
    segmentation1: "CopickSegmentation",
    segmentation2: "CopickSegmentation",
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    voxel_spacing: float,
    is_multilabel: bool = False,
    **kwargs,
) -> Optional[Tuple["CopickSegmentation", Dict[str, int]]]:
    """Compute the voxel-wise intersection (logical AND) of two segmentations."""
    # Thin wrapper: delegate to the generic boolean operation with
    # operation fixed to 'intersection'.
    return segmentation_boolean_operation(
        segmentation1=segmentation1,
        segmentation2=segmentation2,
        run=run,
        object_name=object_name,
        session_id=session_id,
        user_id=user_id,
        operation="intersection",
        voxel_spacing=voxel_spacing,
        is_multilabel=is_multilabel,
        **kwargs,
    )
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def segmentation_exclusion(
    segmentation1: "CopickSegmentation",
    segmentation2: "CopickSegmentation",
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    voxel_spacing: float,
    is_multilabel: bool = False,
    **kwargs,
) -> Optional[Tuple["CopickSegmentation", Dict[str, int]]]:
    """Compute the voxel-wise exclusive or (XOR) of two segmentations."""
    # Thin wrapper: delegate to the generic boolean operation with
    # operation fixed to 'exclusion'.
    return segmentation_boolean_operation(
        segmentation1=segmentation1,
        segmentation2=segmentation2,
        run=run,
        object_name=object_name,
        session_id=session_id,
        user_id=user_id,
        operation="exclusion",
        voxel_spacing=voxel_spacing,
        is_multilabel=is_multilabel,
        **kwargs,
    )
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
def segmentation_multi_union(
    segmentations: List["CopickSegmentation"],
    run: "CopickRun",
    object_name: str,
    session_id: str,
    user_id: str,
    voxel_spacing: float,
    is_multilabel: bool = False,
    **kwargs,
) -> Optional[Tuple["CopickSegmentation", Dict[str, int]]]:
    """
    Compute the N-way union of multiple segmentations.

    Every input is binarized (non-zero -> True) and the results are merged
    with a voxel-wise logical OR.

    Args:
        segmentations: List of CopickSegmentation objects (N≥2)
        run: CopickRun object
        object_name: Name for output segmentation
        session_id: Session ID for output
        user_id: User ID for output
        voxel_spacing: Voxel spacing for output
        is_multilabel: Whether output is multilabel (default: False for binary union)
        **kwargs: Additional arguments

    Returns:
        Tuple of (CopickSegmentation, stats) or None if failed
    """
    try:
        if len(segmentations) < 2:
            logger.error("Need at least 2 segmentations for N-way union")
            return None

        # Materialize all input volumes up front, bailing out on any that
        # fail to load.
        volumes = []
        for idx, seg in enumerate(segmentations):
            vol = seg.numpy()
            if vol is None or vol.size == 0:
                logger.error(f"Could not load segmentation {idx+1} (session: {seg.session_id})")
                return None
            volumes.append(vol)

        # All volumes must share one shape for a voxel-wise OR.
        ref_shape = volumes[0].shape
        for idx, vol in enumerate(volumes[1:], start=2):
            if vol.shape != ref_shape:
                logger.error(f"Shape mismatch in segmentation {idx}: {vol.shape} vs {ref_shape}")
                return None

        # Voxel-wise OR across all binarized volumes in one reduction.
        combined = np.logical_or.reduce([vol.astype(bool) for vol in volumes])
        result_array = combined.astype(np.uint8)

        # Create output segmentation
        output_seg = run.new_segmentation(
            name=object_name,
            user_id=user_id,
            session_id=session_id,
            is_multilabel=is_multilabel,
            voxel_size=voxel_spacing,
            exist_ok=True,
        )

        output_seg.from_numpy(result_array)

        stats = {"voxels_created": int(np.sum(result_array))}
        logger.info(f"Created {len(segmentations)}-way union with {stats['voxels_created']} voxels")
        return output_seg, stats

    except Exception as e:
        # Broad catch by design: batch drivers expect None on any failure.
        logger.error(f"Error performing N-way union: {e}")
        return None
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
# Create batch workers for each operation
# Workers wrap the single-run operations for the batch machinery;
# min_points=0 because segmentation inputs carry no point count.
_segmentation_union_worker = create_batch_worker(segmentation_union, "segmentation", "segmentation", min_points=0)
_segmentation_difference_worker = create_batch_worker(
    segmentation_difference,
    "segmentation",
    "segmentation",
    min_points=0,
)
_segmentation_intersection_worker = create_batch_worker(
    segmentation_intersection,
    "segmentation",
    "segmentation",
    min_points=0,
)
_segmentation_exclusion_worker = create_batch_worker(
    segmentation_exclusion,
    "segmentation",
    "segmentation",
    min_points=0,
)

# Create batch converters
# dual_input=True: each of these operations consumes two segmentations.
segmentation_union_batch = create_batch_converter(
    segmentation_union,
    "Computing segmentation unions",
    "segmentation",
    "segmentation",
    min_points=0,
    dual_input=True,
)

segmentation_difference_batch = create_batch_converter(
    segmentation_difference,
    "Computing segmentation differences",
    "segmentation",
    "segmentation",
    min_points=0,
    dual_input=True,
)

segmentation_intersection_batch = create_batch_converter(
    segmentation_intersection,
    "Computing segmentation intersections",
    "segmentation",
    "segmentation",
    min_points=0,
    dual_input=True,
)

segmentation_exclusion_batch = create_batch_converter(
    segmentation_exclusion,
    "Computing segmentation exclusions",
    "segmentation",
    "segmentation",
    min_points=0,
    dual_input=True,
)

# Lazy batch converters for new architecture
# These defer loading/conversion until the task actually runs.
segmentation_union_lazy_batch = create_lazy_batch_converter(
    converter_func=segmentation_union,
    task_description="Computing segmentation unions",
)

segmentation_difference_lazy_batch = create_lazy_batch_converter(
    converter_func=segmentation_difference,
    task_description="Computing segmentation differences",
)

segmentation_intersection_lazy_batch = create_lazy_batch_converter(
    converter_func=segmentation_intersection,
    task_description="Computing segmentation intersections",
)

segmentation_exclusion_lazy_batch = create_lazy_batch_converter(
    converter_func=segmentation_exclusion,
    task_description="Computing segmentation exclusions",
)

# Lazy batch converter for N-way union
segmentation_multi_union_lazy_batch = create_lazy_batch_converter(
    converter_func=segmentation_multi_union,
    task_description="Computing N-way segmentation unions",
)