copick-utils 0.5.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,9 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2024-present Kyle Harrington <czi@kyleharrington.com>
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
6
+
7
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
8
+
9
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,73 @@
1
+ Metadata-Version: 2.3
2
+ Name: copick-utils
3
+ Version: 0.5.0
4
+ Summary: Utilities for copick
5
+ License: MIT
6
+ Author: Kyle Harrington
7
+ Author-email: czi@kyleharrington.com
8
+ Requires-Python: >=3.9
9
+ Classifier: Development Status :: 4 - Beta
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Programming Language :: Python
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3.9
14
+ Classifier: Programming Language :: Python :: 3.10
15
+ Classifier: Programming Language :: Python :: 3.11
16
+ Classifier: Programming Language :: Python :: 3.12
17
+ Classifier: Programming Language :: Python :: 3.13
18
+ Classifier: Programming Language :: Python :: 3.8
19
+ Classifier: Programming Language :: Python :: Implementation :: CPython
20
+ Classifier: Programming Language :: Python :: Implementation :: PyPy
21
+ Requires-Dist: copick (>=0.8.0)
22
+ Project-URL: Documentation, https://github.com/KyleHarrington/copick-utils#readme
23
+ Project-URL: Issues, https://github.com/KyleHarrington/copick-utils/issues
24
+ Project-URL: Source, https://github.com/KyleHarrington/copick-utils
25
+ Description-Content-Type: text/markdown
26
+
27
+ # copick-utils
28
+
29
+ [![PyPI - Version](https://img.shields.io/pypi/v/copick-utils.svg)](https://pypi.org/project/copick-utils)
30
+ [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/copick-utils.svg)](https://pypi.org/project/copick-utils)
31
+
32
+ -----
33
+
34
+ ## Table of Contents
35
+
36
+ - [Installation](#installation)
37
+ - [Citation](#citation)
38
+ - [License](#license)
39
+
40
+ ## Installation
41
+
42
+ ```console
43
+ pip install copick-utils
44
+ ```
45
+
46
+ ## Citation
47
+
48
+ If you use `copick-utils` in your research, please cite:
49
+
50
+ ```bibtex
51
+ @article{harrington2024open,
52
+ title={Open-source Tools for CryoET Particle Picking Machine Learning Competitions},
53
+ author={Harrington, Kyle I. and Zhao, Zhuowen and Schwartz, Jonathan and Kandel, Saugat and Ermel, Utz and Paraan, Mohammadreza and Potter, Clinton and Carragher, Bridget},
54
+ journal={bioRxiv},
55
+ year={2024},
56
+ doi={10.1101/2024.11.04.621608}
57
+ }
58
+ ```
59
+
60
+ This software was introduced in a NeurIPS 2024 Workshop on Machine Learning in Structural Biology as "Open-source Tools for CryoET Particle Picking Machine Learning Competitions".
61
+
62
+ ## License
63
+
64
+ `copick-utils` is distributed under the terms of the [MIT](https://spdx.org/licenses/MIT.html) license.
65
+
66
+ ## Code of Conduct
67
+
68
+ This project adheres to the Contributor Covenant [code of conduct](https://github.com/chanzuckerberg/.github/blob/main/CODE_OF_CONDUCT.md). By participating, you are expected to uphold this code. Please report unacceptable behavior to [opensource@chanzuckerberg.com](mailto:opensource@chanzuckerberg.com).
69
+
70
+ ## Reporting Security Issues
71
+
72
+ If you believe you have found a security issue, please responsibly disclose by contacting us at [security@chanzuckerberg.com](mailto:security@chanzuckerberg.com).
73
+
@@ -0,0 +1,46 @@
1
+ # copick-utils
2
+
3
+ [![PyPI - Version](https://img.shields.io/pypi/v/copick-utils.svg)](https://pypi.org/project/copick-utils)
4
+ [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/copick-utils.svg)](https://pypi.org/project/copick-utils)
5
+
6
+ -----
7
+
8
+ ## Table of Contents
9
+
10
+ - [Installation](#installation)
11
+ - [Citation](#citation)
12
+ - [License](#license)
13
+
14
+ ## Installation
15
+
16
+ ```console
17
+ pip install copick-utils
18
+ ```
19
+
20
+ ## Citation
21
+
22
+ If you use `copick-utils` in your research, please cite:
23
+
24
+ ```bibtex
25
+ @article{harrington2024open,
26
+ title={Open-source Tools for CryoET Particle Picking Machine Learning Competitions},
27
+ author={Harrington, Kyle I. and Zhao, Zhuowen and Schwartz, Jonathan and Kandel, Saugat and Ermel, Utz and Paraan, Mohammadreza and Potter, Clinton and Carragher, Bridget},
28
+ journal={bioRxiv},
29
+ year={2024},
30
+ doi={10.1101/2024.11.04.621608}
31
+ }
32
+ ```
33
+
34
+ This software was introduced in a NeurIPS 2024 Workshop on Machine Learning in Structural Biology as "Open-source Tools for CryoET Particle Picking Machine Learning Competitions".
35
+
36
+ ## License
37
+
38
+ `copick-utils` is distributed under the terms of the [MIT](https://spdx.org/licenses/MIT.html) license.
39
+
40
+ ## Code of Conduct
41
+
42
+ This project adheres to the Contributor Covenant [code of conduct](https://github.com/chanzuckerberg/.github/blob/main/CODE_OF_CONDUCT.md). By participating, you are expected to uphold this code. Please report unacceptable behavior to [opensource@chanzuckerberg.com](mailto:opensource@chanzuckerberg.com).
43
+
44
+ ## Reporting Security Issues
45
+
46
+ If you believe you have found a security issue, please responsibly disclose by contacting us at [security@chanzuckerberg.com](mailto:security@chanzuckerberg.com).
@@ -0,0 +1,59 @@
1
+ [build-system]
2
+ requires = ["poetry-core"]
3
+ build-backend = "poetry.core.masonry.api"
4
+
5
+ [tool.poetry]
6
+ name = "copick-utils"
7
+ version = "0.5.0"
8
+ description = "Utilities for copick"
9
+ readme = "README.md"
10
+ license = "MIT"
11
+ keywords = []
12
+ authors = [
13
+ "Kyle Harrington <czi@kyleharrington.com>",
14
+ "Jonathan Schwartz <jonathan.schwartz@czii.org>"
15
+ ]
16
+ classifiers = [
17
+ "Development Status :: 4 - Beta",
18
+ "Programming Language :: Python",
19
+ "Programming Language :: Python :: 3.9",
20
+ "Programming Language :: Python :: 3.10",
21
+ "Programming Language :: Python :: 3.11",
22
+ "Programming Language :: Python :: 3.12",
23
+ "Programming Language :: Python :: 3.13",
24
+ "Programming Language :: Python :: Implementation :: CPython",
25
+ "Programming Language :: Python :: Implementation :: PyPy",
26
+ ]
27
+ packages = [{ include = "copick_utils", from = "src" }]
28
+
29
+ [tool.poetry.dependencies]
30
+ python = ">=3.9"
31
+ copick = ">=0.8.0"
32
+
33
+ [tool.poetry.urls]
34
+ Documentation = "https://github.com/KyleHarrington/copick-utils#readme"
35
+ Issues = "https://github.com/KyleHarrington/copick-utils/issues"
36
+ Source = "https://github.com/KyleHarrington/copick-utils"
37
+
38
+ [tool.poetry.scripts]
39
+ # optional if you expose CLI commands
40
+ # copick-utils = "copick_utils.cli:main"
41
+
42
+ [tool.coverage.run]
43
+ source_pkgs = ["copick_utils", "tests"]
44
+ branch = true
45
+ parallel = true
46
+ omit = [
47
+ "src/copick_utils/__about__.py",
48
+ ]
49
+
50
+ [tool.coverage.paths]
51
+ copick_utils = ["src/copick_utils", "*/copick-utils/src/copick_utils"]
52
+ tests = ["tests", "*/copick-utils/tests"]
53
+
54
+ [tool.coverage.report]
55
+ exclude_lines = [
56
+ "no cov",
57
+ "if __name__ == .__main__.:",
58
+ "if TYPE_CHECKING:",
59
+ ]
@@ -0,0 +1,4 @@
1
+ # SPDX-FileCopyrightText: 2024-present Kyle Harrington <czi@kyleharrington.com>
2
+ #
3
+ # SPDX-License-Identifier: MIT
4
+ __version__ = "0.0.1"
@@ -0,0 +1,3 @@
1
+ # SPDX-FileCopyrightText: 2024-present Kyle Harrington <czi@kyleharrington.com>
2
+ #
3
+ # SPDX-License-Identifier: MIT
@@ -0,0 +1,96 @@
1
+ import numpy as np
2
+ from skimage.feature import multiscale_basic_features
3
+ import zarr
4
+ from numcodecs import Blosc
5
+
6
def compute_skimage_features(tomogram, feature_type, copick_root, intensity=True, edges=True, texture=True, sigma_min=0.5, sigma_max=16.0, feature_chunk_size=None):
    """
    Compute scikit-image multiscale basic features over a tomogram, chunk by
    chunk, and store them as a new copick feature map.

    The volume is processed in chunks with a half-chunk halo on every side so
    that filter responses near chunk borders see enough context; only the halo-
    free core of each chunk is written to the output.

    Args:
        tomogram: Copick tomogram object; its zarr store is read at the highest
            resolution level ("0") and the features are attached to it.
        feature_type (str): Name under which the feature maps are stored.
        copick_root: Unused here; kept for API compatibility with callers.
            # NOTE(review): confirm whether this parameter can be dropped upstream.
        intensity, edges, texture (bool): Passed through to
            ``skimage.feature.multiscale_basic_features``.
        sigma_min, sigma_max (float): Gaussian scale range for the features.
        feature_chunk_size (tuple, optional): Spatial chunk size to process and
            store with; defaults to the tomogram's own chunking.

    Returns:
        The copick features object the maps were written to.
    """
    image = zarr.open(tomogram.zarr(), mode='r')['0']
    input_chunk_size = feature_chunk_size if feature_chunk_size else image.chunks
    # Normalize to a 3-tuple (Z, Y, X); a 4-tuple would include a channel axis.
    chunk_size = input_chunk_size if len(input_chunk_size) == 3 else input_chunk_size[1:]

    # Half-chunk halo read around every chunk (same halo used on all axes).
    overlap = int(chunk_size[0] / 2)

    print(f"Processing image with shape {image.shape}")
    print(f"Using chunk size: {chunk_size}, overlap: {overlap}")

    # Probe with a tiny volume to learn how many feature channels are produced
    # for this parameter combination.
    test_chunk = np.zeros((10, 10, 10), dtype=image.dtype)
    test_features = multiscale_basic_features(
        test_chunk,
        intensity=intensity,
        edges=edges,
        texture=texture,
        sigma_min=sigma_min,
        sigma_max=sigma_max
    )
    num_features = test_features.shape[-1]

    # Prepare output Zarr array directly in the tomogram's feature store.
    print(f"Creating new feature store with {num_features} features...")
    copick_features = tomogram.new_features(feature_type)
    feature_store = copick_features.zarr()

    # Output chunking: features axis first, then the requested spatial chunks.
    if feature_chunk_size is None:
        feature_chunk_size = (num_features, *chunk_size)
    else:
        feature_chunk_size = (num_features, *feature_chunk_size)

    out_array = zarr.create(
        shape=(num_features, *image.shape),
        chunks=feature_chunk_size,
        dtype='float32',
        compressor=Blosc(cname='zstd', clevel=3, shuffle=2),
        store=feature_store,
        overwrite=True
    )

    # Process each chunk with its halo, then write back only the core.
    for z in range(0, image.shape[0], chunk_size[0]):
        for y in range(0, image.shape[1], chunk_size[1]):
            for x in range(0, image.shape[2], chunk_size[2]):
                # Padded read window, clamped to the volume bounds.
                z_start = max(z - overlap, 0)
                z_end = min(z + chunk_size[0] + overlap, image.shape[0])
                y_start = max(y - overlap, 0)
                y_end = min(y + chunk_size[1] + overlap, image.shape[1])
                x_start = max(x - overlap, 0)
                x_end = min(x + chunk_size[2] + overlap, image.shape[2])

                chunk = image[z_start:z_end, y_start:y_end, x_start:x_end]
                chunk_features = multiscale_basic_features(
                    chunk,
                    intensity=intensity,
                    edges=edges,
                    texture=texture,
                    sigma_min=sigma_min,
                    sigma_max=sigma_max
                )

                # BUG FIX: the original trimmed the halo with `-overlap` end
                # slices keyed on whether the padded read hit the volume edge.
                # For a final partial chunk (z + chunk < shape <= z + chunk +
                # overlap) that kept too many planes and the assignment below
                # raised a shape-mismatch error. Trim by explicit offsets: the
                # feature value for absolute voxel v lives at index v - start
                # inside the padded chunk.
                z_stop = min(z + chunk_size[0], image.shape[0])
                y_stop = min(y + chunk_size[1], image.shape[1])
                x_stop = min(x + chunk_size[2], image.shape[2])
                core = chunk_features[
                    z - z_start:z_stop - z_start,
                    y - y_start:y_stop - y_start,
                    x - x_start:x_stop - x_start,
                ]

                # Move the feature axis first and make contiguous for the write.
                contiguous_chunk = np.ascontiguousarray(core.transpose(3, 0, 1, 2))

                out_array[:, z:z_stop, y:y_stop, x:x_stop] = contiguous_chunk

    print(f"Features saved under feature type '{feature_type}'")
    return copick_features
85
+
86
+
87
if __name__ == "__main__":
    # Placeholder demo: wire these up to a real copick project before running.
    root = None  # e.g. copick.from_file("path/to/copick_config.json")
    tomo = None  # e.g. root.get_run(...).get_voxel_spacing(...).get_tomogram(...)

    # BUG FIX: the original called compute_skimage_features with the None
    # placeholders, which crashed with AttributeError. Fail with a clear
    # message instead.
    if root is None or tomo is None:
        raise SystemExit("Configure `root` and `tomo` above before running this example.")

    compute_skimage_features(
        tomogram=tomo,
        feature_type="skimageFeatures",
        copick_root=root,
        intensity=True, edges=True, texture=True, sigma_min=0.5, sigma_max=16.0,
        feature_chunk_size=None,  # default to the tomogram's detected chunk size
    )
@@ -0,0 +1,69 @@
1
+ import numpy as np
2
+ import zarr
3
+ from copick.models import CopickPoint
4
+
5
def grid_picker(pickable_obj, run, tomogram, grid_spacing_factor, session_id="0", user_id="gridPicker"):
    """
    Lay down a regular 3D grid of picks covering the full extent of a tomogram.

    The grid step is the particle radius scaled by ``grid_spacing_factor``.
    Objects that are not particles, or that lack a usable radius, are skipped.

    Args:
        pickable_obj: The pickable object (particle) whose radius sets the grid step.
        run: The Copick run the picks are saved into.
        tomogram: The tomogram whose highest-resolution shape bounds the grid.
        grid_spacing_factor: Multiplier applied to the particle radius.
        session_id: Session ID for the saved picks. Defaults to "0".
        user_id: User ID for the saved picks. Defaults to "gridPicker".

    Returns:
        The stored pick set, or None when the object is skipped.
    """
    # Guard clauses: only particles with a non-zero radius can be gridded.
    if not pickable_obj.is_particle:
        print(f"Object {pickable_obj.name} is not a particle.")
        return

    obj_name = pickable_obj.name
    radius = pickable_obj.radius
    if not radius:
        print(f"Object {obj_name} does not have a valid radius.")
        return

    grid_spacing = radius * grid_spacing_factor

    # Highest-resolution level of the tomogram defines the grid bounds.
    image = zarr.open(tomogram.zarr(), mode='r')['0']
    depth, height, width = image.shape

    # Build the grid in z-major order, matching one point per lattice site.
    points = [
        CopickPoint(location={'x': x, 'y': y, 'z': z})
        for z in np.arange(0, depth, grid_spacing)
        for y in np.arange(0, height, grid_spacing)
        for x in np.arange(0, width, grid_spacing)
    ]

    # Persist the grid as a new pick set on the run.
    pick_set = run.new_picks(obj_name, session_id, user_id)
    pick_set.points = points
    pick_set.store()

    print(f"Saved {len(points)} grid points for object {obj_name}.")
    return pick_set
47
+
48
if __name__ == "__main__":
    import copick

    # Example configuration — adjust to your project.
    copick_config_path = "path/to/copick_config.json"
    grid_spacing_factor = 1.5
    tomo_type = "your_tomo_type"
    voxel_spacing = 1.0
    session_id = "example_session"
    user_id = "example_user"
    run_name = "example_run"

    # Load the Copick root and the run
    root = copick.from_file(copick_config_path)
    run = root.get_run(run_name)

    # Get the tomogram for the requested voxel spacing and type
    voxel_spacing_obj = run.get_voxel_spacing(voxel_spacing)
    tomogram = voxel_spacing_obj.get_tomogram(tomo_type)

    # BUG FIX: the original called the undefined name `create_grid_of_picks`
    # (NameError at runtime); the function defined in this module is
    # `grid_picker`.
    for pickable_obj in root.pickable_objects:
        grid_picker(pickable_obj, run, tomogram, grid_spacing_factor, session_id, user_id)
@@ -0,0 +1,67 @@
1
+ import numpy as np
2
+ import scipy.ndimage as ndi
3
+ from skimage.segmentation import watershed
4
+ from skimage.measure import regionprops
5
+ from skimage.morphology import binary_erosion, binary_dilation, ball
6
+
7
+
8
def picks_from_segmentation(segmentation, segmentation_idx, maxima_filter_size, min_particle_size, max_particle_size, session_id, user_id, pickable_object, run, voxel_spacing=1):
    """
    Extract particle centroids for one label of a multilabel segmentation and
    save them as picks.

    The label is isolated, lightly opened (erosion then dilation), split into
    candidate particles via a distance-transform watershed, and the centroids
    of size-filtered regions are stored on the run.

    Args:
        segmentation (np.ndarray): Multilabel segmentation array.
        segmentation_idx (int): The segmentation label to process.
        maxima_filter_size (int): Size of the maximum-detection filter.
        min_particle_size (int): Minimum particle size kept.
        max_particle_size (int): Maximum particle size kept.
        session_id (str): Session ID for pick saving.
        user_id (str): User ID for pick saving.
        pickable_object (str): Name of the object the picks belong to.
        run: Copick run object that manages pick saving.
        voxel_spacing (int): Scale applied to centroid coordinates (default 1).

    Returns:
        The stored pick set, or None when the label is absent or no centroid
        passes the size filter.
    """
    # Isolate the requested label as a binary volume.
    binary_mask = (segmentation == segmentation_idx).astype(int)

    # Nothing to do when the label never occurs.
    if np.sum(binary_mask) == 0:
        print(f"No segmentation with label {segmentation_idx} found.")
        return

    # Light morphological opening to suppress single-voxel noise.
    struct_elem = ball(1)
    dilated = binary_dilation(binary_erosion(binary_mask, struct_elem), struct_elem)

    # Distance transform plus local-maxima detection seeds the watershed.
    distance = ndi.distance_transform_edt(dilated)
    footprint = np.ones((maxima_filter_size, maxima_filter_size, maxima_filter_size))
    local_max = (distance == ndi.maximum_filter(distance, footprint=footprint))

    # Watershed from the seed markers, restricted to the opened mask.
    markers, _ = ndi.label(local_max)
    watershed_labels = watershed(-distance, markers, mask=dilated)

    # Keep centroids of regions whose volume is inside the size window.
    all_centroids = [
        region.centroid
        for region in regionprops(watershed_labels)
        if min_particle_size <= region.area <= max_particle_size
    ]

    if not all_centroids:
        print(f"No valid centroids found for label {segmentation_idx}.")
        return None

    # Centroids are (z, y, x); picks expect (x, y, z) scaled by voxel spacing.
    pick_set = run.new_picks(pickable_object, session_id, user_id)
    positions = np.array(all_centroids)[:, [2, 1, 0]] * voxel_spacing
    pick_set.from_numpy(positions=positions)
    pick_set.store()

    print(f"Centroids for label {segmentation_idx} saved successfully.")
    return pick_set
63
+ return None
64
+
65
+
66
+ # Example call to the function
67
+ # picks_from_segmentation(segmentation_array, label_id, 9, 1000, 50000, session_id, user_id, pickable_object_name, run_object)
@@ -0,0 +1,161 @@
1
+ import numpy as np
2
+ import zarr
3
+ from scipy.ndimage import zoom
4
+ import copick
5
+
6
def from_picks(pick,
               seg_volume,
               radius: float = 10.0,
               label_value: int = 1,
               voxel_spacing: float = 10):
    """
    Paint each pick location into a segmentation volume as a labeled sphere.

    Parameters:
    -----------
    pick : copick.models.CopickPicks
        Pick set whose `points` each expose a `location` with `x`, `y`, `z`
        coordinates in physical units.
    seg_volume : numpy.ndarray
        3D segmentation volume of shape (Z, Y, X), modified in place.
    radius : float, optional
        Sphere radius in physical units (not voxels). Default is 10.0.
    label_value : int, optional
        Label written inside each sphere. Default is 1.
    voxel_spacing : float, optional
        Voxel spacing used to convert physical units to voxels. Default is 10.

    Returns:
    --------
    numpy.ndarray
        The same segmentation volume with spheres painted in.
    """
    def _sphere_mask(shape, center, r, val):
        # Build a small volume with `val` inside a sphere of radius `r`.
        zc, yc, xc = center
        zz, yy, xx = np.indices(shape)
        dist_sq = (xx - xc) ** 2 + (yy - yc) ** 2 + (zz - zc) ** 2
        out = np.zeros(shape, dtype=np.float32)
        out[dist_sq <= r ** 2] = val
        return out

    def _clamped_bounds(center, pad, axis_len):
        # Integer window [lo, hi) around `center`, clipped to the axis.
        lo = max(int(np.floor(center - pad)), 0)
        hi = min(int(np.ceil(center + pad + 1)), axis_len)
        return lo, hi

    # Convert the radius to voxels; never smaller than one voxel.
    radius_voxel = max(radius / voxel_spacing, 1)
    delta = int(np.ceil(radius_voxel))

    for point in pick.points:
        # Pick locations are in physical units; convert to voxel coordinates.
        cx = point.location.x / voxel_spacing
        cy = point.location.y / voxel_spacing
        cz = point.location.z / voxel_spacing

        # Bounding box of the sphere, clipped to the volume.
        x_lo, x_hi = _clamped_bounds(cx, delta, seg_volume.shape[2])
        y_lo, y_hi = _clamped_bounds(cy, delta, seg_volume.shape[1])
        z_lo, z_hi = _clamped_bounds(cz, delta, seg_volume.shape[0])

        sub_shape = (z_hi - z_lo, y_hi - y_lo, x_hi - x_lo)
        if min(sub_shape) <= 0:
            # Pick lies entirely outside the volume.
            continue

        # Sphere centered at the pick, expressed in subarray coordinates.
        local_center = (cz - z_lo, cy - y_lo, cx - x_lo)
        sphere = _sphere_mask(sub_shape, local_center, radius_voxel, label_value)

        # Merge into the target, keeping any existing larger labels.
        region = seg_volume[z_lo:z_hi, y_lo:y_hi, x_lo:x_hi]
        seg_volume[z_lo:z_hi, y_lo:y_hi, x_lo:x_hi] = np.maximum(region, sphere)

    return seg_volume
71
+
72
+
73
def downsample_to_exact_shape(array, target_shape):
    """
    Resample a 3D array onto `target_shape` using nearest-neighbor (order-0)
    interpolation.

    The per-axis zoom factors are chosen so the output has exactly the
    requested shape, which makes this safe for label volumes.
    """
    factors = [wanted / actual for wanted, actual in zip(target_shape, array.shape)]
    return zoom(array, factors, order=0)
80
+
81
+
82
def segmentation_from_picks(radius, painting_segmentation_name, run, voxel_spacing, tomo_type, pickable_object, pick_set, user_id="paintedPicks", session_id="0"):
    """
    Paints picks from a run into a multiscale segmentation array, representing them as spheres in 3D space.

    Parameters:
    -----------
    radius : float
        Radius of the spheres in physical units.
    painting_segmentation_name : str
        The name of the segmentation dataset to be created or modified.
    run : copick.Run
        The current Copick run object.
    voxel_spacing : float
        The spacing of the voxels in the tomogram data.
    tomo_type : str
        The type of tomogram to retrieve.
    pickable_object : copick.models.CopickObject
        The object that defines the label value to be used in segmentation.
    pick_set : copick.models.CopickPicks
        The set of picks containing the locations to paint spheres.
    user_id : str, optional
        The ID of the user creating the segmentation. Default is "paintedPicks".
    session_id : str, optional
        The session ID for this segmentation. Default is "0".

    Returns:
    --------
    copick.Segmentation
        The created or modified segmentation object.
    """
    # Fetch the tomogram whose multiscale pyramid defines the target shapes.
    tomogram = run.get_voxel_spacing(voxel_spacing).get_tomogram(tomo_type)
    if not tomogram:
        raise ValueError("Tomogram not found for the given parameters.")

    # Reuse an existing matching segmentation or create a new one via copick.
    segs = run.get_segmentations(user_id=user_id, session_id=session_id, is_multilabel=True, name=painting_segmentation_name, voxel_size=voxel_spacing)
    if len(segs) == 0:
        seg = run.new_segmentation(voxel_spacing, painting_segmentation_name, session_id, True, user_id=user_id)
    else:
        seg = segs[0]

    # Open the segmentation store for read/write; "0" is the highest-resolution level.
    segmentation_group = zarr.open(seg.zarr(), mode="a")
    highest_res_name = "0"

    # The tomogram's zarr supplies the shape for each pyramid level.
    tomogram_zarr = zarr.open(tomogram.zarr(), "r")

    highest_res_shape = tomogram_zarr[highest_res_name].shape
    if highest_res_name not in segmentation_group:
        segmentation_group.create(highest_res_name, shape=highest_res_shape, dtype=np.uint16, overwrite=True)

    # Load the full level into memory and reset it; any previous painting is discarded.
    highest_res_seg = segmentation_group[highest_res_name][:]
    highest_res_seg.fill(0)

    # Paint picks as spheres into the in-memory highest-resolution array.
    highest_res_seg = from_picks(pick_set, highest_res_seg, radius, pickable_object.label, voxel_spacing)

    # Write back the highest resolution data
    segmentation_group[highest_res_name][:] = highest_res_seg

    # Mirror the tomogram's multiscale pyramid for the lower-resolution levels.
    multiscale_metadata = tomogram_zarr.attrs.get('multiscales', [{}])[0].get('datasets', [])
    for level_index, level_metadata in enumerate(multiscale_metadata):
        # Level 0 was already written above.
        if level_index == 0:
            continue

        # Fall back to the enumeration index when the metadata omits a path.
        level_name = level_metadata.get("path", str(level_index))
        expected_shape = tuple(tomogram_zarr[level_name].shape)

        # Nearest-neighbor downsample preserves integer label values.
        scaled_array = downsample_to_exact_shape(highest_res_seg, expected_shape)

        # Create/overwrite the Zarr array for this level
        segmentation_group.create_dataset(level_name, shape=expected_shape, data=scaled_array, dtype=np.uint16, overwrite=True)

        # NOTE(review): create_dataset already wrote `data=scaled_array`; this
        # assignment appears redundant but is kept as-is — confirm before removing.
        segmentation_group[level_name][:] = scaled_array

    return seg
@@ -0,0 +1,96 @@
1
+ from typing import Any, Dict, List
2
+ import numpy as np
3
+
4
def tomogram(
    run,
    input_volume,
    voxel_size=10,
    algorithm="wbp"
):
    """
    Writes a volumetric tomogram into an OME-Zarr format within a Copick directory.

    Parameters:
    -----------
    run : copick.Run
        The current Copick run object.
    input_volume : np.ndarray
        The volumetric tomogram data to be written.
    voxel_size : float, optional
        The size of the voxels in physical units. Default is 10.
    algorithm : str, optional
        The tomographic reconstruction algorithm to use. Default is 'wbp'.

    Returns:
    --------
    copick.Tomogram
        The created or modified tomogram object.
    """
    # Retrieve or create the voxel spacing entry for this size.
    voxel_spacing = run.get_voxel_spacing(voxel_size)
    if voxel_spacing is None:
        voxel_spacing = run.new_voxel_spacing(voxel_size=voxel_size)

    # Reuse an existing tomogram for this algorithm, or create a new one.
    tomogram = voxel_spacing.get_tomogram(algorithm)
    if tomogram is None:
        tomogram = voxel_spacing.new_tomogram(tomo_type=algorithm)

    # Write the tomogram data
    tomogram.from_numpy(input_volume)

    # BUG FIX: the docstring promises the tomogram object, but the original
    # implicitly returned None.
    return tomogram
42
+
43
+
44
def segmentation(
    run,
    segmentation_volume,
    user_id,
    name="segmentation",
    session_id="0",
    voxel_size=10,
    multilabel=True
):
    """
    Writes a segmentation into an OME-Zarr format within a Copick directory.

    Parameters:
    -----------
    run : copick.Run
        The current Copick run object.
    segmentation_volume : np.ndarray
        The segmentation data to be written.
    user_id : str
        The ID of the user creating the segmentation.
    name : str, optional
        The name of the segmentation dataset to be created or modified. Default is 'segmentation'.
    session_id : str, optional
        The session ID for this segmentation. Default is '0'.
    voxel_size : float, optional
        The size of the voxels in physical units. Default is 10.
    multilabel : bool, optional
        Whether the segmentation is a multilabel segmentation. Default is True.

    Returns:
    --------
    copick.Segmentation
        The created or modified segmentation object.
    """
    # Look up existing segmentations matching name/user/session.
    segmentations = run.get_segmentations(name=name, user_id=user_id, session_id=session_id)

    # BUG FIX: the original created a brand-new segmentation whenever *any*
    # existing segmentation had a different voxel size
    # (`any(seg.voxel_size != voxel_size ...)`), even when one with the
    # requested voxel size already existed. Reuse the matching segmentation
    # when present; create a new one only if none matches.
    existing = next((seg for seg in segmentations if seg.voxel_size == voxel_size), None)
    if existing is None:
        segmentation = run.new_segmentation(
            voxel_size=voxel_size,
            name=name,
            session_id=session_id,
            is_multilabel=multilabel,
            user_id=user_id
        )
    else:
        # Overwrite the existing segmentation at the requested voxel size.
        segmentation = existing

    # Write the segmentation data
    segmentation.from_numpy(segmentation_volume, dtype=np.uint8)

    # BUG FIX: the docstring promises the segmentation object, but the
    # original implicitly returned None.
    return segmentation