copick-utils 0.6.0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. copick_utils/__init__.py +1 -0
  2. copick_utils/cli/__init__.py +33 -0
  3. copick_utils/cli/clipmesh.py +161 -0
  4. copick_utils/cli/clippicks.py +154 -0
  5. copick_utils/cli/clipseg.py +163 -0
  6. copick_utils/cli/conversion_commands.py +32 -0
  7. copick_utils/cli/enclosed.py +191 -0
  8. copick_utils/cli/filter_components.py +166 -0
  9. copick_utils/cli/fit_spline.py +191 -0
  10. copick_utils/cli/hull.py +138 -0
  11. copick_utils/cli/input_output_selection.py +76 -0
  12. copick_utils/cli/logical_commands.py +29 -0
  13. copick_utils/cli/mesh2picks.py +170 -0
  14. copick_utils/cli/mesh2seg.py +167 -0
  15. copick_utils/cli/meshop.py +262 -0
  16. copick_utils/cli/picks2ellipsoid.py +171 -0
  17. copick_utils/cli/picks2mesh.py +181 -0
  18. copick_utils/cli/picks2plane.py +156 -0
  19. copick_utils/cli/picks2seg.py +134 -0
  20. copick_utils/cli/picks2sphere.py +170 -0
  21. copick_utils/cli/picks2surface.py +164 -0
  22. copick_utils/cli/picksin.py +146 -0
  23. copick_utils/cli/picksout.py +148 -0
  24. copick_utils/cli/processing_commands.py +18 -0
  25. copick_utils/cli/seg2mesh.py +135 -0
  26. copick_utils/cli/seg2picks.py +128 -0
  27. copick_utils/cli/segop.py +248 -0
  28. copick_utils/cli/separate_components.py +155 -0
  29. copick_utils/cli/skeletonize.py +164 -0
  30. copick_utils/cli/util.py +580 -0
  31. copick_utils/cli/validbox.py +155 -0
  32. copick_utils/converters/__init__.py +35 -0
  33. copick_utils/converters/converter_common.py +543 -0
  34. copick_utils/converters/ellipsoid_from_picks.py +335 -0
  35. copick_utils/converters/lazy_converter.py +576 -0
  36. copick_utils/converters/mesh_from_picks.py +209 -0
  37. copick_utils/converters/mesh_from_segmentation.py +119 -0
  38. copick_utils/converters/picks_from_mesh.py +542 -0
  39. copick_utils/converters/picks_from_segmentation.py +168 -0
  40. copick_utils/converters/plane_from_picks.py +251 -0
  41. copick_utils/converters/segmentation_from_mesh.py +291 -0
  42. copick_utils/{segmentation → converters}/segmentation_from_picks.py +151 -15
  43. copick_utils/converters/sphere_from_picks.py +306 -0
  44. copick_utils/converters/surface_from_picks.py +337 -0
  45. copick_utils/features/skimage.py +33 -13
  46. copick_utils/io/readers.py +62 -59
  47. copick_utils/io/writers.py +9 -14
  48. copick_utils/logical/__init__.py +43 -0
  49. copick_utils/logical/distance_operations.py +604 -0
  50. copick_utils/logical/enclosed_operations.py +222 -0
  51. copick_utils/logical/mesh_operations.py +443 -0
  52. copick_utils/logical/point_operations.py +303 -0
  53. copick_utils/logical/segmentation_operations.py +399 -0
  54. copick_utils/pickers/grid_picker.py +5 -4
  55. copick_utils/process/__init__.py +47 -0
  56. copick_utils/process/connected_components.py +360 -0
  57. copick_utils/process/filter_components.py +306 -0
  58. copick_utils/process/hull.py +106 -0
  59. copick_utils/process/skeletonize.py +326 -0
  60. copick_utils/process/spline_fitting.py +648 -0
  61. copick_utils/process/validbox.py +333 -0
  62. copick_utils/util/__init__.py +6 -0
  63. copick_utils/util/config_models.py +614 -0
  64. {copick_utils-0.6.0.dist-info → copick_utils-1.0.0.dist-info}/METADATA +38 -12
  65. copick_utils-1.0.0.dist-info/RECORD +71 -0
  66. {copick_utils-0.6.0.dist-info → copick_utils-1.0.0.dist-info}/WHEEL +1 -1
  67. copick_utils-1.0.0.dist-info/entry_points.txt +29 -0
  68. copick_utils/__about__.py +0 -4
  69. copick_utils/segmentation/picks_from_segmentation.py +0 -67
  70. copick_utils-0.6.0.dist-info/RECORD +0 -15
  71. /copick_utils/{segmentation → io}/__init__.py +0 -0
  72. /copick_utils-0.6.0.dist-info/LICENSE.txt → /copick_utils-1.0.0.dist-info/licenses/LICENSE +0 -0
copick_utils/converters/surface_from_picks.py
@@ -0,0 +1,337 @@
+ from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple
+
+ import numpy as np
+ import trimesh as tm
+ from copick.util.log import get_logger
+ from scipy.interpolate import Rbf, griddata
+ from scipy.spatial import Delaunay
+ from sklearn.decomposition import PCA
+
+ from copick_utils.converters.converter_common import (
+     cluster,
+     create_batch_converter,
+     create_batch_worker,
+     store_mesh_with_stats,
+     validate_points,
+ )
+ from copick_utils.converters.lazy_converter import create_lazy_batch_converter
+
+ if TYPE_CHECKING:
+     from copick.models import CopickMesh, CopickRun
+
+ logger = get_logger(__name__)
+
+
+ def fit_2d_surface_to_points(
+     points: np.ndarray,
+     method: str = "delaunay",
+     grid_resolution: int = 50,
+ ) -> tm.Trimesh:
+     """Fit a 2D surface to 3D points using different interpolation methods.
+
+     Args:
+         points: Nx3 array of points.
+         method: Surface fitting method ('delaunay', 'rbf', 'grid').
+         grid_resolution: Resolution for grid-based methods.
+
+     Returns:
+         Trimesh surface object.
+     """
+     if len(points) < 3:
+         raise ValueError("Need at least 3 points to fit a surface")
+
+     if method == "delaunay":
+         return delaunay_surface(points)
+     elif method == "rbf":
+         return rbf_surface(points, grid_resolution)
+     elif method == "grid":
+         return grid_surface(points, grid_resolution)
+     else:
+         raise ValueError(f"Unknown surface method: {method}")
+
+
+ def delaunay_surface(points: np.ndarray) -> tm.Trimesh:
+     """Create a surface using Delaunay triangulation.
+
+     Args:
+         points: Nx3 array of points.
+
+     Returns:
+         Trimesh surface object.
+     """
+     # Find the best 2D projection plane using PCA
+     center = np.mean(points, axis=0)
+     centered_points = points - center
+
+     pca = PCA(n_components=3)
+     pca.fit(centered_points)
+
+     # Use first two principal components for 2D projection
+     projected_2d = pca.transform(centered_points)[:, :2]
+
+     # Create Delaunay triangulation in 2D
+     tri = Delaunay(projected_2d)
+
+     # Use original 3D points as vertices with 2D triangulation
+     return tm.Trimesh(vertices=points, faces=tri.simplices)
+
+
+ def rbf_surface(points: np.ndarray, grid_resolution: int) -> tm.Trimesh:
+     """Create a surface using RBF (Radial Basis Function) interpolation.
+
+     Args:
+         points: Nx3 array of points.
+         grid_resolution: Resolution of the output grid.
+
+     Returns:
+         Trimesh surface object.
+     """
+     # Find the dominant plane using PCA
+     center = np.mean(points, axis=0)
+     centered_points = points - center
+
+     pca = PCA(n_components=3)
+     pca.fit(centered_points)
+
+     # Project points onto the first two principal components
+     projected_2d = pca.transform(centered_points)[:, :2]
+     heights = pca.transform(centered_points)[:, 2]  # Third component as height
+
+     # Create grid for interpolation
+     x_min, x_max = projected_2d[:, 0].min(), projected_2d[:, 0].max()
+     y_min, y_max = projected_2d[:, 1].min(), projected_2d[:, 1].max()
+
+     xi = np.linspace(x_min, x_max, grid_resolution)
+     yi = np.linspace(y_min, y_max, grid_resolution)
+     xi_grid, yi_grid = np.meshgrid(xi, yi)
+
+     # RBF interpolation
+     rbf = Rbf(projected_2d[:, 0], projected_2d[:, 1], heights, function="thin_plate")
+     zi_grid = rbf(xi_grid, yi_grid)
+
+     # Convert grid back to 3D coordinates
+     grid_points_2d = np.column_stack([xi_grid.flatten(), yi_grid.flatten(), zi_grid.flatten()])
+     grid_points_3d = pca.inverse_transform(grid_points_2d) + center
+
+     # Create triangulation for the grid
+     grid_points_3d.reshape((grid_resolution, grid_resolution, 3))
+     faces = []
+
+     for i in range(grid_resolution - 1):
+         for j in range(grid_resolution - 1):
+             # Two triangles per grid cell
+             v1 = i * grid_resolution + j
+             v2 = i * grid_resolution + (j + 1)
+             v3 = (i + 1) * grid_resolution + j
+             v4 = (i + 1) * grid_resolution + (j + 1)
+
+             faces.extend([[v1, v2, v3], [v2, v4, v3]])
+
+     return tm.Trimesh(vertices=grid_points_3d, faces=faces)
+
+
+ def grid_surface(points: np.ndarray, grid_resolution: int) -> tm.Trimesh:
+     """Create a surface using grid-based interpolation.
+
+     Args:
+         points: Nx3 array of points.
+         grid_resolution: Resolution of the output grid.
+
+     Returns:
+         Trimesh surface object.
+     """
+     # Find bounding box
+     min_coords = np.min(points, axis=0)
+     max_coords = np.max(points, axis=0)
+
+     # Find the dimension with smallest range (likely the "height" dimension)
+     ranges = max_coords - min_coords
+     height_dim = np.argmin(ranges)
+
+     # Use other two dimensions for grid
+     other_dims = [i for i in range(3) if i != height_dim]
+
+     # Create grid
+     x_coords = np.linspace(min_coords[other_dims[0]], max_coords[other_dims[0]], grid_resolution)
+     y_coords = np.linspace(min_coords[other_dims[1]], max_coords[other_dims[1]], grid_resolution)
+     xi, yi = np.meshgrid(x_coords, y_coords)
+
+     # Interpolate height values
+     zi = griddata(
+         points[:, other_dims],
+         points[:, height_dim],
+         (xi, yi),
+         method="linear",
+         fill_value=np.mean(points[:, height_dim]),
+     )
+
+     # Build 3D vertices
+     vertices = np.zeros((grid_resolution * grid_resolution, 3))
+     vertices[:, other_dims[0]] = xi.flatten()
+     vertices[:, other_dims[1]] = yi.flatten()
+     vertices[:, height_dim] = zi.flatten()
+
+     # Create triangulation
+     faces = []
+     for i in range(grid_resolution - 1):
+         for j in range(grid_resolution - 1):
+             # Two triangles per grid cell
+             v1 = i * grid_resolution + j
+             v2 = i * grid_resolution + (j + 1)
+             v3 = (i + 1) * grid_resolution + j
+             v4 = (i + 1) * grid_resolution + (j + 1)
+
+             faces.extend([[v1, v2, v3], [v2, v4, v3]])
+
+     return tm.Trimesh(vertices=vertices, faces=faces)
+
+
+ def surface_from_picks(
+     points: np.ndarray,
+     run: "CopickRun",
+     object_name: str,
+     session_id: str,
+     user_id: str,
+     surface_method: str = "delaunay",
+     grid_resolution: int = 50,
+     use_clustering: bool = False,
+     clustering_method: str = "dbscan",
+     clustering_params: Optional[Dict[str, Any]] = None,
+     all_clusters: bool = True,
+     individual_meshes: bool = False,
+     session_id_template: Optional[str] = None,
+ ) -> Optional[Tuple["CopickMesh", Dict[str, int]]]:
+     """Create surface mesh(es) from pick points.
+
+     Args:
+         points: Nx3 array of pick positions.
+         run: Copick run object.
+         object_name: Name of the mesh object.
+         session_id: Session ID for the mesh.
+         user_id: User ID for the mesh.
+         surface_method: Surface fitting method ('delaunay', 'rbf', 'grid').
+         grid_resolution: Resolution for grid-based methods.
+         use_clustering: Whether to cluster points first.
+         clustering_method: Clustering method ('dbscan', 'kmeans').
+         clustering_params: Parameters for clustering.
+             e.g.
+             - {'eps': 5.0, 'min_samples': 3} for DBSCAN
+             - {'n_clusters': 3} for KMeans
+         all_clusters: If True, use all clusters; if False, use only the largest cluster.
+         individual_meshes: If True, create separate mesh objects for each surface.
+         session_id_template: Template for individual mesh session IDs.
+
+     Returns:
+         Tuple of (CopickMesh object, stats dict) or None if creation failed.
+         Stats dict contains 'vertices_created' and 'faces_created' totals.
+     """
+     if not validate_points(points, 3, "surface"):
+         return None
+
+     if clustering_params is None:
+         clustering_params = {}
+
+     # Define surface creation function
+     def create_surface_from_points(cluster_points):
+         return fit_2d_surface_to_points(cluster_points, surface_method, grid_resolution)
+
+     # Handle clustering workflow with special surface logic
+     if use_clustering:
+         point_clusters = cluster(
+             points,
+             clustering_method,
+             min_points_per_cluster=3,  # Surfaces need at least 3 points
+             **clustering_params,
+         )
+
+         if not point_clusters:
+             logger.warning("No valid clusters found")
+             return None
+
+         logger.info(f"Found {len(point_clusters)} clusters")
+
+         if all_clusters and len(point_clusters) > 1:
+             if individual_meshes:
+                 # Create separate mesh objects for each surface
+                 created_meshes = []
+                 total_vertices = 0
+                 total_faces = 0
+
+                 for i, cluster_points in enumerate(point_clusters):
+                     try:
+                         surface_mesh = create_surface_from_points(cluster_points)
+
+                         # Generate session ID using template if provided
+                         if session_id_template:
+                             surface_session_id = session_id_template.format(
+                                 base_session_id=session_id,
+                                 instance_id=i,
+                             )
+                         else:
+                             surface_session_id = f"{session_id}-{i:03d}"
+
+                         copick_mesh = run.new_mesh(object_name, surface_session_id, user_id, exist_ok=True)
+                         copick_mesh.mesh = surface_mesh
+                         copick_mesh.store()
+                         created_meshes.append(copick_mesh)
+                         total_vertices += len(surface_mesh.vertices)
+                         total_faces += len(surface_mesh.faces)
+                         logger.info(
+                             f"Created individual surface mesh {i} with {len(surface_mesh.vertices)} vertices",
+                         )
+                     except Exception as e:
+                         logger.error(f"Failed to create mesh {i}: {e}")
+                         continue
+
+                 # Return the first mesh and total stats
+                 if created_meshes:
+                     stats = {"vertices_created": total_vertices, "faces_created": total_faces}
+                     return created_meshes[0], stats
+                 else:
+                     return None
+             else:
+                 # Create meshes from all clusters and combine them
+                 all_meshes = []
+                 for cluster_points in point_clusters:
+                     surface_mesh = create_surface_from_points(cluster_points)
+                     all_meshes.append(surface_mesh)
+
+                 # Combine all meshes
+                 combined_mesh = tm.util.concatenate(all_meshes)
+         else:
+             # Use largest cluster
+             cluster_sizes = [len(cluster) for cluster in point_clusters]
+             largest_cluster_idx = np.argmax(cluster_sizes)
+             points_to_use = point_clusters[largest_cluster_idx]
+             logger.info(f"Using largest cluster with {len(points_to_use)} points")
+
+             combined_mesh = create_surface_from_points(points_to_use)
+     else:
+         # Use all points without clustering
+         combined_mesh = create_surface_from_points(points)
+
+     # Store mesh and return stats
+     try:
+         return store_mesh_with_stats(run, combined_mesh, object_name, session_id, user_id, "surface")
+     except Exception as e:
+         logger.critical(f"Error creating mesh: {e}")
+         return None
+
+
+ # Create worker function using common infrastructure
+ _surface_from_picks_worker = create_batch_worker(surface_from_picks, "surface", min_points=3)
+
+
+ # Create batch converter using common infrastructure
+ surface_from_picks_batch = create_batch_converter(
+     surface_from_picks,
+     "Converting picks to surface meshes",
+     "surface",
+     min_points=3,
+ )
+
+ # Lazy batch converter for new architecture
+ surface_from_picks_lazy_batch = create_lazy_batch_converter(
+     converter_func=surface_from_picks,
+     task_description="Converting picks to surface meshes",
+ )
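For orientation, here is a minimal sketch of calling the new converter directly. The config path, run name, and object/user/session names are hypothetical, and the points array is assembled the same way the package's own readers access pick locations; the converter's docstring only states "Nx3 array of pick positions", so the (x, y, z) ordering below is an assumption.

import copick
import numpy as np

from copick_utils.converters.surface_from_picks import surface_from_picks

root = copick.from_file("config.json")  # hypothetical project config
run = root.get_run("TS_001")  # hypothetical run name

# Collect pick locations into an Nx3 array (assumed x, y, z order).
picks = run.get_picks(object_name="membrane", user_id="alice")[0]
points = np.array([[p.location.x, p.location.y, p.location.z] for p in picks.points])

# Fit a single Delaunay surface through all points and store it as a mesh.
result = surface_from_picks(
    points,
    run,
    object_name="membrane",
    session_id="0",
    user_id="surface-fit",
    surface_method="delaunay",
)
if result is not None:
    mesh, stats = result
    print(stats)  # {'vertices_created': ..., 'faces_created': ...}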
copick_utils/features/skimage.py
@@ -1,19 +1,30 @@
  import numpy as np
- from skimage.feature import multiscale_basic_features
  import zarr
  from numcodecs import Blosc
+ from skimage.feature import multiscale_basic_features
+
 
- def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True, edges=True, texture=True, sigma_min=0.5, sigma_max=16.0, feature_chunk_size=None):
+ def compute_skimage_features(
+     tomogram,
+     feature_type,
+     copick_root,
+     intensity=True,
+     edges=True,
+     texture=True,
+     sigma_min=0.5,
+     sigma_max=16.0,
+     feature_chunk_size=None,
+ ):
      """
      Processes the tomogram chunkwise and computes the multiscale basic features.
      Allows for optional feature chunk size.
      """
-     image = zarr.open(tomogram.zarr(), mode='r')['0']
+     image = zarr.open(tomogram.zarr(), mode="r")["0"]
      input_chunk_size = feature_chunk_size if feature_chunk_size else image.chunks
      chunk_size = input_chunk_size if len(input_chunk_size) == 3 else input_chunk_size[1:]
-
+
      overlap = int(chunk_size[0] / 2)
-
+
      print(f"Processing image with shape {image.shape}")
      print(f"Using chunk size: {chunk_size}, overlap: {overlap}")
 
@@ -25,7 +36,7 @@ def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True
          edges=edges,
          texture=texture,
          sigma_min=sigma_min,
-         sigma_max=sigma_max
+         sigma_max=sigma_max,
      )
      num_features = test_features.shape[-1]
 
@@ -43,10 +54,10 @@ def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True
      out_array = zarr.create(
          shape=(num_features, *image.shape),
          chunks=feature_chunk_size,
-         dtype='float32',
-         compressor=Blosc(cname='zstd', clevel=3, shuffle=2),
+         dtype="float32",
+         compressor=Blosc(cname="zstd", clevel=3, shuffle=2),
          store=feature_store,
-         overwrite=True
+         overwrite=True,
      )
 
      # Process each chunk
@@ -67,7 +78,7 @@ def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True
              edges=edges,
              texture=texture,
              sigma_min=sigma_min,
-             sigma_max=sigma_max
+             sigma_max=sigma_max,
          )
 
          # Adjust indices for overlap
@@ -78,7 +89,12 @@ def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True
          # Ensure contiguous array and correct slicing
          contiguous_chunk = np.ascontiguousarray(chunk_features[z_slice, y_slice, x_slice].transpose(3, 0, 1, 2))
 
-         out_array[0:num_features, z:z + chunk_size[0], y:y + chunk_size[1], x:x + chunk_size[2]] = contiguous_chunk
+         out_array[
+             0:num_features,
+             z : z + chunk_size[0],
+             y : y + chunk_size[1],
+             x : x + chunk_size[2],
+         ] = contiguous_chunk
 
      print(f"Features saved under feature type '{feature_type}'")
      return copick_features
@@ -91,6 +107,10 @@ if __name__ == "__main__":
          tomogram=tomo,
          feature_type="skimageFeatures",
          copick_root=root,
-         intensity=True, edges=True, texture=True, sigma_min=0.5, sigma_max=16.0,
-         feature_chunk_size=None # Default to detected chunk size
+         intensity=True,
+         edges=True,
+         texture=True,
+         sigma_min=0.5,
+         sigma_max=16.0,
+         feature_chunk_size=None,  # Default to detected chunk size
      )
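The feature-extractor changes above are formatting-only. A minimal driver, mirroring the module's own __main__ block; the config path and run/tomogram names are hypothetical:

import copick

from copick_utils.features.skimage import compute_skimage_features

root = copick.from_file("config.json")  # hypothetical project config
tomo = root.get_run("TS_001").get_voxel_spacing(10).get_tomogram("wbp")

features = compute_skimage_features(
    tomogram=tomo,
    feature_type="skimageFeatures",
    copick_root=root,
    intensity=True,
    edges=True,
    texture=True,
    sigma_min=0.5,
    sigma_max=16.0,
    feature_chunk_size=None,  # default to the tomogram's detected chunk size
)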
copick_utils/io/readers.py
@@ -1,33 +1,34 @@
  import numpy as np
 
- def tomogram(run,
-              voxel_size: float = 10,
-              algorithm: str = 'wbp',
-              raise_error: bool = False):
-
+
+ def tomogram(run, voxel_size: float = 10, algorithm: str = "wbp", raise_error: bool = False):
      voxel_spacing_obj = run.get_voxel_spacing(voxel_size)
 
      if voxel_spacing_obj is None:
          # Query Available Voxel Spacings
          availableVoxelSpacings = [tomo.voxel_size for tomo in run.voxel_spacings]
 
-         # Report to the user which voxel spacings they can use
-         message = (f"[Warning] No tomogram found for {run.name} with voxel size {voxel_size} and tomogram type {algorithm}"
-                    f"Available spacings are: {', '.join(map(str, availableVoxelSpacings))}" )
+         # Report to the user which voxel spacings they can use
+         message = (
+             f"[Warning] No tomogram found for {run.name} with voxel size {voxel_size} and tomogram type {algorithm}"
+             f"Available spacings are: {', '.join(map(str, availableVoxelSpacings))}"
+         )
          if raise_error:
              raise ValueError(message)
          else:
              print(message)
              return None
-
+
      tomogram = voxel_spacing_obj.get_tomogram(algorithm)
      if tomogram is None:
          # Get available algorithms
          availableAlgorithms = [tomo.tomo_type for tomo in run.get_voxel_spacing(voxel_size).tomograms]
-
+
          # Report to the user which algorithms are available
-         message = (f"[Warning] No tomogram found for {run.name} with voxel size {voxel_size} and tomogram type {algorithm}"
-                    f"Available algorithms are: {', '.join(availableAlgorithms)}")
+         message = (
+             f"[Warning] No tomogram found for {run.name} with voxel size {voxel_size} and tomogram type {algorithm}"
+             f"Available algorithms are: {', '.join(availableAlgorithms)}"
+         )
          if raise_error:
              raise ValueError(message)
          else:
@@ -36,32 +37,29 @@ def tomogram(
 
      return tomogram.numpy()
 
- def segmentation(run,
-                  voxel_spacing: float,
-                  segmentation_name: str,
-                  session_id=None,
-                  user_id=None,
-                  raise_error = False):
 
-     seg = run.get_segmentations(name=segmentation_name,
-                                 session_id = session_id,
-                                 user_id = user_id,
-                                 voxel_size = voxel_spacing)
+ def segmentation(run, voxel_spacing: float, segmentation_name: str, session_id=None, user_id=None, raise_error=False):
+     seg = run.get_segmentations(
+         name=segmentation_name,
+         session_id=session_id,
+         user_id=user_id,
+         voxel_size=voxel_spacing,
+     )
 
      # No Segmentations Are Available, Result in Error
      if len(seg) == 0:
          # Get all available segmentations with their metadata
          available_segs = run.get_segmentations(voxel_size=voxel_spacing)
          seg_info = [(s.name, s.user_id, s.session_id) for s in available_segs]
-
+
          # Format the information for display
-         seg_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})"
-                        for name, uid, sid in seg_info]
-
-         message = ( f'\nNo segmentation found matching:\n'
-                     f'  name: {segmentation_name}, user_id: {user_id}, session_id: {session_id}\n'
-                     f'Available segmentations in {run.name} are:\n  ' +
-                     '\n  '.join(seg_details) )
+         seg_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})" for name, uid, sid in seg_info]
+
+         message = (
+             f"\nNo segmentation found matching:\n"
+             f"  name: {segmentation_name}, user_id: {user_id}, session_id: {session_id}\n"
+             f"Available segmentations in {run.name} are:\n  " + "\n  ".join(seg_details)
+         )
          if raise_error:
              raise ValueError(message)
          else:
@@ -70,20 +68,24 @@ def segmentation(run,
 
      # No Segmentations Are Available, Result in Error
      if len(seg) > 1:
-         print(f'[Warning] More Than 1 Segmentation is Available for the Query Information. '
-               f'Available Segmentations are: {seg} '
-               f'Defaulting to Loading: {seg[0]}\n')
+         print(
+             f"[Warning] More Than 1 Segmentation is Available for the Query Information. "
+             f"Available Segmentations are: {seg} "
+             f"Defaulting to Loading: {seg[0]}\n",
+         )
          seg = seg[0]
 
      return seg.numpy()
 
- def coordinates(run,                      # CoPick run object containing the segmentation data
-                 name: str,                # Name of the object or protein for which coordinates are being extracted
-                 user_id: str,             # Identifier of the user that generated the picks
-                 session_id: str = None,   # Identifier of the session that generated the picks
-                 voxel_size: float = 10,   # Voxel size of the tomogram, used for scaling the coordinates
-                 raise_error: bool = False):
-
+
+ def coordinates(
+     run,  # CoPick run object containing the segmentation data
+     name: str,  # Name of the object or protein for which coordinates are being extracted
+     user_id: str,  # Identifier of the user that generated the picks
+     session_id: str = None,  # Identifier of the session that generated the picks
+     voxel_size: float = 10,  # Voxel size of the tomogram, used for scaling the coordinates
+     raise_error: bool = False,
+ ):
      # Retrieve the pick points associated with the specified object and user ID
      picks = run.get_picks(object_name=name, user_id=user_id, session_id=session_id)
 
@@ -92,15 +94,15 @@ def coordinates(run, # CoPick run object containing the segm
 
      available_picks = run.get_picks()
      picks_info = [(s.pickable_object_name, s.user_id, s.session_id) for s in available_picks]
-
+
      # Format the information for display
-     picks_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})"
-                      for name, uid, sid in picks_info]
-
-     message = ( f'\nNo picks found matching:\n'
-                 f'  name: {name}, user_id: {user_id}, session_id: {session_id}\n'
-                 f'Available picks are:\n  '
-                 + '\n  '.join(picks_details) )
+     picks_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})" for name, uid, sid in picks_info]
+
+     message = (
+         f"\nNo picks found matching:\n"
+         f"  name: {name}, user_id: {user_id}, session_id: {session_id}\n"
+         f"Available picks are:\n  " + "\n  ".join(picks_details)
+     )
      if raise_error:
          raise ValueError(message)
      else:
@@ -109,24 +111,25 @@ def coordinates(run, # CoPick run object containing the segm
      elif len(picks) > 1:
          # Format pick information for display
          picks_info = [(p.pickable_object_name, p.user_id, p.session_id) for p in picks]
-         picks_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})"
-                          for name, uid, sid in picks_info]
+         picks_details = [f"(name: {name}, user_id: {uid}, session_id: {sid})" for name, uid, sid in picks_info]
 
-         print(f'[Warning] More than 1 pick is available for the query information.'
-               f'\nAvailable picks are:\n  ' +
-               '\n  '.join(picks_details) +
-               f'\nDefaulting to loading:\n {picks[0]}\n')
+         print(
+             "[Warning] More than 1 pick is available for the query information."
+             "\nAvailable picks are:\n  " + "\n  ".join(picks_details) + f"\nDefaulting to loading:\n {picks[0]}\n",
+         )
          points = picks[0].points
 
      # Initialize an array to store the coordinates
-     nPoints = len(picks[0].points) # Number of points retrieved
-     coordinates = np.zeros([len(picks[0].points), 3]) # Create an empty array to hold the (z, y, x) coordinates
+     nPoints = len(picks[0].points)  # Number of points retrieved
+     coordinates = np.zeros([len(picks[0].points), 3])  # Create an empty array to hold the (z, y, x) coordinates
 
      # Iterate over all points and convert their locations to coordinates in voxel space
      for ii in range(nPoints):
-         coordinates[ii,] = [points[ii].location.z / voxel_size, # Scale z-coordinate by voxel size
-                             points[ii].location.y / voxel_size, # Scale y-coordinate by voxel size
-                             points[ii].location.x / voxel_size] # Scale x-coordinate by voxel size
+         coordinates[ii,] = [
+             points[ii].location.z / voxel_size,  # Scale z-coordinate by voxel size
+             points[ii].location.y / voxel_size,  # Scale y-coordinate by voxel size
+             points[ii].location.x / voxel_size,
+         ]  # Scale x-coordinate by voxel size
 
      # Return the array of coordinates
      return coordinates
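The reader changes are likewise formatting-only. A short sketch of the three helpers; the config path and run/object/user names are hypothetical, and per the code above each helper prints a warning and returns None on a miss unless raise_error=True:

import copick

from copick_utils.io import readers

root = copick.from_file("config.json")  # hypothetical project config
run = root.get_run("TS_001")

vol = readers.tomogram(run, voxel_size=10, algorithm="wbp")  # tomogram as a numpy array
labels = readers.segmentation(run, voxel_spacing=10, segmentation_name="membrane")
coords = readers.coordinates(run, name="ribosome", user_id="alice", voxel_size=10)  # Nx3 (z, y, x) in voxel units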