ngio 0.5.0b6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ngio/__init__.py +69 -0
- ngio/common/__init__.py +28 -0
- ngio/common/_dimensions.py +335 -0
- ngio/common/_masking_roi.py +153 -0
- ngio/common/_pyramid.py +408 -0
- ngio/common/_roi.py +315 -0
- ngio/common/_synt_images_utils.py +101 -0
- ngio/common/_zoom.py +188 -0
- ngio/experimental/__init__.py +5 -0
- ngio/experimental/iterators/__init__.py +15 -0
- ngio/experimental/iterators/_abstract_iterator.py +390 -0
- ngio/experimental/iterators/_feature.py +189 -0
- ngio/experimental/iterators/_image_processing.py +130 -0
- ngio/experimental/iterators/_mappers.py +48 -0
- ngio/experimental/iterators/_rois_utils.py +126 -0
- ngio/experimental/iterators/_segmentation.py +235 -0
- ngio/hcs/__init__.py +19 -0
- ngio/hcs/_plate.py +1354 -0
- ngio/images/__init__.py +44 -0
- ngio/images/_abstract_image.py +967 -0
- ngio/images/_create_synt_container.py +132 -0
- ngio/images/_create_utils.py +423 -0
- ngio/images/_image.py +926 -0
- ngio/images/_label.py +411 -0
- ngio/images/_masked_image.py +531 -0
- ngio/images/_ome_zarr_container.py +1237 -0
- ngio/images/_table_ops.py +471 -0
- ngio/io_pipes/__init__.py +75 -0
- ngio/io_pipes/_io_pipes.py +361 -0
- ngio/io_pipes/_io_pipes_masked.py +488 -0
- ngio/io_pipes/_io_pipes_roi.py +146 -0
- ngio/io_pipes/_io_pipes_types.py +56 -0
- ngio/io_pipes/_match_shape.py +377 -0
- ngio/io_pipes/_ops_axes.py +344 -0
- ngio/io_pipes/_ops_slices.py +411 -0
- ngio/io_pipes/_ops_slices_utils.py +199 -0
- ngio/io_pipes/_ops_transforms.py +104 -0
- ngio/io_pipes/_zoom_transform.py +180 -0
- ngio/ome_zarr_meta/__init__.py +65 -0
- ngio/ome_zarr_meta/_meta_handlers.py +536 -0
- ngio/ome_zarr_meta/ngio_specs/__init__.py +77 -0
- ngio/ome_zarr_meta/ngio_specs/_axes.py +515 -0
- ngio/ome_zarr_meta/ngio_specs/_channels.py +462 -0
- ngio/ome_zarr_meta/ngio_specs/_dataset.py +89 -0
- ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py +539 -0
- ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +438 -0
- ngio/ome_zarr_meta/ngio_specs/_pixel_size.py +122 -0
- ngio/ome_zarr_meta/v04/__init__.py +27 -0
- ngio/ome_zarr_meta/v04/_custom_models.py +18 -0
- ngio/ome_zarr_meta/v04/_v04_spec.py +473 -0
- ngio/ome_zarr_meta/v05/__init__.py +27 -0
- ngio/ome_zarr_meta/v05/_custom_models.py +18 -0
- ngio/ome_zarr_meta/v05/_v05_spec.py +511 -0
- ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/mask.png +0 -0
- ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/nuclei.png +0 -0
- ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/raw.jpg +0 -0
- ngio/resources/__init__.py +55 -0
- ngio/resources/resource_model.py +36 -0
- ngio/tables/__init__.py +43 -0
- ngio/tables/_abstract_table.py +270 -0
- ngio/tables/_tables_container.py +449 -0
- ngio/tables/backends/__init__.py +57 -0
- ngio/tables/backends/_abstract_backend.py +240 -0
- ngio/tables/backends/_anndata.py +139 -0
- ngio/tables/backends/_anndata_utils.py +90 -0
- ngio/tables/backends/_csv.py +19 -0
- ngio/tables/backends/_json.py +92 -0
- ngio/tables/backends/_parquet.py +19 -0
- ngio/tables/backends/_py_arrow_backends.py +222 -0
- ngio/tables/backends/_table_backends.py +226 -0
- ngio/tables/backends/_utils.py +608 -0
- ngio/tables/v1/__init__.py +23 -0
- ngio/tables/v1/_condition_table.py +71 -0
- ngio/tables/v1/_feature_table.py +125 -0
- ngio/tables/v1/_generic_table.py +49 -0
- ngio/tables/v1/_roi_table.py +575 -0
- ngio/transforms/__init__.py +5 -0
- ngio/transforms/_zoom.py +19 -0
- ngio/utils/__init__.py +45 -0
- ngio/utils/_cache.py +48 -0
- ngio/utils/_datasets.py +165 -0
- ngio/utils/_errors.py +37 -0
- ngio/utils/_fractal_fsspec_store.py +42 -0
- ngio/utils/_zarr_utils.py +534 -0
- ngio-0.5.0b6.dist-info/METADATA +148 -0
- ngio-0.5.0b6.dist-info/RECORD +88 -0
- ngio-0.5.0b6.dist-info/WHEEL +4 -0
- ngio-0.5.0b6.dist-info/licenses/LICENSE +28 -0
ngio/__init__.py
ADDED
@@ -0,0 +1,69 @@
+"""Next Generation file format IO."""
+
+from importlib.metadata import PackageNotFoundError, version
+
+try:
+    __version__ = version("ngio")
+except PackageNotFoundError:  # pragma: no cover
+    __version__ = "uninstalled"
+__author__ = "Lorenzo Cerrone"
+__email__ = "lorenzo.cerrone@uzh.ch"
+
+from ngio.common import Dimensions, Roi, RoiSlice
+from ngio.hcs import (
+    OmeZarrPlate,
+    OmeZarrWell,
+    create_empty_plate,
+    create_empty_well,
+    open_ome_zarr_plate,
+    open_ome_zarr_well,
+)
+from ngio.images import (
+    ChannelSelectionModel,
+    Image,
+    Label,
+    OmeZarrContainer,
+    create_empty_ome_zarr,
+    create_ome_zarr_from_array,
+    create_synthetic_ome_zarr,
+    open_image,
+    open_label,
+    open_ome_zarr_container,
+)
+from ngio.ome_zarr_meta.ngio_specs import (
+    AxesSetup,
+    DefaultNgffVersion,
+    ImageInWellPath,
+    NgffVersions,
+    PixelSize,
+)
+from ngio.utils import NgioSupportedStore, StoreOrGroup
+
+__all__ = [
+    "AxesSetup",
+    "ChannelSelectionModel",
+    "DefaultNgffVersion",
+    "Dimensions",
+    "Image",
+    "ImageInWellPath",
+    "Label",
+    "NgffVersions",
+    "NgioSupportedStore",
+    "OmeZarrContainer",
+    "OmeZarrPlate",
+    "OmeZarrWell",
+    "PixelSize",
+    "Roi",
+    "RoiSlice",
+    "StoreOrGroup",
+    "create_empty_ome_zarr",
+    "create_empty_plate",
+    "create_empty_well",
+    "create_ome_zarr_from_array",
+    "create_synthetic_ome_zarr",
+    "open_image",
+    "open_label",
+    "open_ome_zarr_container",
+    "open_ome_zarr_plate",
+    "open_ome_zarr_well",
+]
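For orientation, here is a minimal usage sketch of the public API re-exported above. It is illustrative only and not part of the package diff: the call signature of open_ome_zarr_container (a path or Zarr store argument) is an assumption based on the exported StoreOrGroup type, not on code shown in this diff.

# Illustrative sketch, not part of the diff; the call signature is assumed.
from ngio import open_ome_zarr_container

# Assumption: the open_* helpers accept a path-like location or Zarr store.
container = open_ome_zarr_container("path/to/image.zarr")
print(container)  # an OmeZarrContainer exposing images, labels, and tables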
ngio/common/__init__.py
ADDED
@@ -0,0 +1,28 @@
+"""Common classes and functions that are used across the package."""
+
+from ngio.common._dimensions import Dimensions
+from ngio.common._masking_roi import compute_masking_roi
+from ngio.common._pyramid import (
+    ChunksLike,
+    ImagePyramidBuilder,
+    ShardsLike,
+    consolidate_pyramid,
+    on_disk_zoom,
+)
+from ngio.common._roi import Roi, RoiSlice
+from ngio.common._zoom import InterpolationOrder, dask_zoom, numpy_zoom
+
+__all__ = [
+    "ChunksLike",
+    "Dimensions",
+    "ImagePyramidBuilder",
+    "InterpolationOrder",
+    "Roi",
+    "RoiSlice",
+    "ShardsLike",
+    "compute_masking_roi",
+    "consolidate_pyramid",
+    "dask_zoom",
+    "numpy_zoom",
+    "on_disk_zoom",
+]
ngio/common/_dimensions.py
ADDED
@@ -0,0 +1,335 @@
+"""Dimension metadata.
+
+This is not related to the NGFF metadata,
+but it is based on the actual metadata of the image data.
+"""
+
+import math
+from typing import overload
+
+from ngio.ome_zarr_meta import (
+    AxesHandler,
+)
+from ngio.ome_zarr_meta.ngio_specs._dataset import Dataset
+from ngio.ome_zarr_meta.ngio_specs._pixel_size import PixelSize
+from ngio.utils import NgioValueError
+
+
+def _are_compatible(shape1: int, shape2: int, scaling: float) -> bool:
+    """Check if shape2 is consistent with shape1 given pixel sizes.
+
+    Since we only deal with shape discrepancies due to rounding,
+    shape1 needs to be larger than shape2.
+    """
+    if shape1 < shape2:
+        return _are_compatible(shape2, shape1, 1 / scaling)
+    expected_shape2 = shape1 * scaling
+    expected_shape2_floor = math.floor(expected_shape2)
+    expected_shape2_ceil = math.ceil(expected_shape2)
+    return shape2 in {expected_shape2_floor, expected_shape2_ceil}
+
+
+def require_axes_match(reference: "Dimensions", other: "Dimensions") -> None:
+    """Require that two Dimensions objects have the same axes.
+
+    Besides the channel axis (which is a special case), all axes must be
+    present in both Dimensions objects.
+
+    Args:
+        reference (Dimensions): The reference dimensions object to compare against.
+        other (Dimensions): The other dimensions object to compare against.
+
+    Raises:
+        NgioValueError: If the axes do not match.
+    """
+    for s_axis in reference.axes_handler.axes:
+        if s_axis.axis_type == "channel":
+            continue
+        o_axis = other.axes_handler.get_axis(s_axis.name)
+        if o_axis is None:
+            raise NgioValueError(
+                f"Axes do not match. The axis {s_axis.name} "
+                "is not present in the other dimensions."
+            )
+    # Check for axes present in the other dimensions but not in the reference
+    for o_axis in other.axes_handler.axes:
+        if o_axis.axis_type == "channel":
+            continue
+        s_axis = reference.axes_handler.get_axis(o_axis.name)
+        if s_axis is None:
+            raise NgioValueError(
+                f"Axes do not match. The axis {o_axis.name} "
+                "is not present in the reference dimensions."
+            )
+
+
+def check_if_axes_match(reference: "Dimensions", other: "Dimensions") -> bool:
+    """Check if two Dimensions objects have the same axes.
+
+    Besides the channel axis (which is a special case), all axes must be
+    present in both Dimensions objects.
+
+    Args:
+        reference (Dimensions): The reference dimensions object to compare against.
+        other (Dimensions): The other dimensions object to compare against.
+
+    Returns:
+        bool: True if the axes match, False otherwise.
+    """
+    try:
+        require_axes_match(reference, other)
+        return True
+    except NgioValueError:
+        return False
+
+
+def require_dimensions_match(
+    reference: "Dimensions", other: "Dimensions", allow_singleton: bool = False
+) -> None:
+    """Require that two Dimensions objects have the same axes and dimensions.
+
+    Besides the channel axis, all axes must have the same dimension in
+    both images.
+
+    Args:
+        reference (Dimensions): The reference dimensions object to compare against.
+        other (Dimensions): The other dimensions object to compare against.
+        allow_singleton (bool): Whether to allow singleton dimensions to be
+            different. For example, if the input image has shape
+            (5, 100, 100) and the label has shape (1, 100, 100).
+
+    Raises:
+        NgioValueError: If the dimensions do not match.
+    """
+    require_axes_match(reference, other)
+    for r_axis in reference.axes_handler.axes:
+        if r_axis.axis_type == "channel":
+            continue
+        o_axis = other.axes_handler.get_axis(r_axis.name)
+        assert o_axis is not None  # already checked in require_axes_match
+
+        r_dim = reference.get(r_axis.name, default=1)
+        o_dim = other.get(o_axis.name, default=1)
+
+        if r_dim != o_dim:
+            if allow_singleton and (r_dim == 1 or o_dim == 1):
+                continue
+            raise NgioValueError(
+                f"Dimensions do not match for axis "
+                f"{r_axis.name}. Got {r_dim} and {o_dim}."
+            )
+
+
+def check_if_dimensions_match(
+    reference: "Dimensions", other: "Dimensions", allow_singleton: bool = False
+) -> bool:
+    """Check if two Dimensions objects have the same axes and dimensions.
+
+    Besides the channel axis, all axes must have the same dimension in
+    both images.
+
+    Args:
+        reference (Dimensions): The reference dimensions object to compare against.
+        other (Dimensions): The other dimensions object to compare against.
+        allow_singleton (bool): Whether to allow singleton dimensions to be
+            different. For example, if the input image has shape
+            (5, 100, 100) and the label has shape (1, 100, 100).
+
+    Returns:
+        bool: True if the dimensions match, False otherwise.
+    """
+    try:
+        require_dimensions_match(reference, other, allow_singleton)
+        return True
+    except NgioValueError:
+        return False
+
+
+def require_rescalable(reference: "Dimensions", other: "Dimensions") -> None:
+    """Require that two images can be rescaled to each other.
+
+    For this to be true, the images must have the same axes, and
+    the pixel sizes must be compatible (i.e. one can be scaled to the other).
+
+    Args:
+        reference (Dimensions): The reference dimensions object to compare against.
+        other (Dimensions): The other dimensions object to compare against.
+
+    """
+    require_axes_match(reference, other)
+    for ax_r in reference.axes_handler.axes:
+        if ax_r.axis_type == "channel":
+            continue
+        ax_o = other.axes_handler.get_axis(ax_r.name)
+        assert ax_o is not None, "Axes do not match."
+        px_r = reference.pixel_size.get(ax_r.name, default=1.0)
+        px_o = other.pixel_size.get(ax_o.name, default=1.0)
+        shape_r = reference.get(ax_r.name, default=1)
+        shape_o = other.get(ax_o.name, default=1)
+        scale = px_r / px_o
+        if not _are_compatible(
+            shape1=shape_r,
+            shape2=shape_o,
+            scaling=scale,
+        ):
+            raise NgioValueError(
+                f"Reference image with shape {reference.shape}, "
+                f"and pixel size {reference.pixel_size}, "
+                f"cannot be rescaled to "
+                f"image with shape {other.shape} "
+                f"and pixel size {other.pixel_size}. "
+            )
+
+
+def check_if_rescalable(reference: "Dimensions", other: "Dimensions") -> bool:
+    """Check if two images can be rescaled.
+
+    For this to be true, the images must have the same axes, and
+    the pixel sizes must be compatible (i.e. one can be scaled to the other).
+
+    Args:
+        reference (Dimensions): The reference dimensions object to compare against.
+        other (Dimensions): The other dimensions object to compare against.
+
+    Returns:
+        bool: True if the images can be rescaled, False otherwise.
+    """
+    try:
+        require_rescalable(reference, other)
+        return True
+    except NgioValueError:
+        return False
+
+
+class Dimensions:
+    """Dimension metadata handling class.
+
+    This class is used to handle and manipulate dimension metadata.
+    It provides methods to access and validate dimension information,
+    such as shape, axes, and properties like is_2d, is_3d, is_time_series, etc.
+    """
+
+    require_axes_match = require_axes_match
+    check_if_axes_match = check_if_axes_match
+    require_dimensions_match = require_dimensions_match
+    check_if_dimensions_match = check_if_dimensions_match
+    require_rescalable = require_rescalable
+    check_if_rescalable = check_if_rescalable
+
+    def __init__(
+        self,
+        shape: tuple[int, ...],
+        chunks: tuple[int, ...],
+        dataset: Dataset,
+    ) -> None:
+        """Create a Dimensions object from a Zarr array.
+
+        Args:
+            shape: The shape of the Zarr array.
+            chunks: The chunks of the Zarr array.
+            dataset: The dataset object.
+        """
+        self._shape = shape
+        self._chunks = chunks
+        self._axes_handler = dataset.axes_handler
+        self._pixel_size = dataset.pixel_size
+
+        if len(self._shape) != len(self._axes_handler.axes):
+            raise NgioValueError(
+                "The number of dimensions must match the number of axes. "
+                f"Expected axes {self._axes_handler.axes_names} but got shape "
+                f"{self._shape}."
+            )
+
+    def __str__(self) -> str:
+        """Return the string representation of the object."""
+        dims = ", ".join(
+            f"{ax.name}: {s}"
+            for ax, s in zip(self._axes_handler.axes, self._shape, strict=True)
+        )
+        return f"Dimensions({dims})"
+
+    def __repr__(self) -> str:
+        """Return the string representation of the object."""
+        return str(self)
+
+    @property
+    def axes_handler(self) -> AxesHandler:
+        """Return the axes handler object."""
+        return self._axes_handler
+
+    @property
+    def pixel_size(self) -> PixelSize:
+        """Return the pixel size object."""
+        return self._pixel_size
+
+    @property
+    def shape(self) -> tuple[int, ...]:
+        """Return the shape as a tuple."""
+        return self._shape
+
+    @property
+    def chunks(self) -> tuple[int, ...]:
+        """Return the chunks as a tuple."""
+        return self._chunks
+
+    @property
+    def axes(self) -> tuple[str, ...]:
+        """Return the axes as a tuple of strings."""
+        return self.axes_handler.axes_names
+
+    @property
+    def is_time_series(self) -> bool:
+        """Return whether the image is a time series."""
+        if self.get("t", default=1) == 1:
+            return False
+        return True
+
+    @property
+    def is_2d(self) -> bool:
+        """Return whether the image is 2D."""
+        if self.get("z", default=1) != 1:
+            return False
+        return True
+
+    @property
+    def is_2d_time_series(self) -> bool:
+        """Return whether the image is a 2D time series."""
+        return self.is_2d and self.is_time_series
+
+    @property
+    def is_3d(self) -> bool:
+        """Return whether the image is 3D."""
+        return not self.is_2d
+
+    @property
+    def is_3d_time_series(self) -> bool:
+        """Return whether the image is a 3D time series."""
+        return self.is_3d and self.is_time_series
+
+    @property
+    def is_multi_channels(self) -> bool:
+        """Return whether the image has multiple channels."""
+        if self.get("c", default=1) == 1:
+            return False
+        return True
+
+    @overload
+    def get(self, axis_name: str, default: None = None) -> int | None:
+        pass
+
+    @overload
+    def get(self, axis_name: str, default: int) -> int:
+        pass
+
+    def get(self, axis_name: str, default: int | None = None) -> int | None:
+        """Return the dimension/shape of the given axis name.
+
+        Args:
+            axis_name: The name of the axis (either canonical or non-canonical).
+            default: The default value to return if the axis does not exist.
+        """
+        index = self.axes_handler.get_index(axis_name)
+        if index is None:
+            return default
+        return self._shape[index]
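The tolerance in _are_compatible exists because a downsampled pyramid level may have been rounded either up or down when it was written, so both the floor and the ceil of the rescaled shape are accepted. A self-contained sketch of the same check (pure Python, mirroring the helper above; it does not import ngio):

# Self-contained illustration of the floor/ceil tolerance used by _are_compatible.
import math

def are_compatible(shape1: int, shape2: int, scaling: float) -> bool:
    # Treat the larger shape as shape1, then accept shape2 if it matches
    # shape1 * scaling up to floor/ceil rounding.
    if shape1 < shape2:
        return are_compatible(shape2, shape1, 1 / scaling)
    expected = shape1 * scaling
    return shape2 in {math.floor(expected), math.ceil(expected)}

# A 1001-pixel axis downsampled by 2x may end up with 500 or 501 pixels,
# so both pass the check; 499 does not.
print(are_compatible(1001, 501, 0.5))  # True
print(are_compatible(1001, 500, 0.5))  # True
print(are_compatible(1001, 499, 0.5))  # False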
ngio/common/_masking_roi.py
ADDED
@@ -0,0 +1,153 @@
+"""Utilities to build masking regions of interest (ROIs)."""
+
+import itertools
+from collections.abc import Sequence
+
+import dask.array as da
+import numpy as np
+import scipy.ndimage as ndi
+from dask.delayed import delayed
+
+from ngio.common._roi import Roi
+from ngio.ome_zarr_meta import PixelSize
+from ngio.utils import NgioValueError
+
+
+def _compute_offsets(chunks):
+    """Given a chunks tuple, compute cumulative offsets for each axis.
+
+    Returns a list where each element is a list of offsets for that dimension.
+    """
+    offsets = []
+    for dim_chunks in chunks:
+        dim_offsets = [0]
+        for size in dim_chunks:
+            dim_offsets.append(dim_offsets[-1] + size)
+        offsets.append(dim_offsets)
+    return offsets
+
+
+def _adjust_slices(slices, offset):
+    """Adjust slices to global coordinates using the provided offset."""
+    adjusted_slices = {}
+    for label, s in slices.items():
+        adjusted = tuple(
+            slice(s_dim.start + off, s_dim.stop + off)
+            for s_dim, off in zip(s, offset, strict=True)
+        )
+        adjusted_slices[label] = adjusted
+    return adjusted_slices
+
+
+@delayed
+def _process_chunk(chunk, offset):
+    """Process a single chunk.
+
+    Run ndi.find_objects and adjust the slices
+    to global coordinates using the provided offset.
+    """
+    local_slices = compute_slices(chunk)
+    local_slices = _adjust_slices(local_slices, offset)
+    return local_slices
+
+
+def _merge_slices(
+    slice1: tuple[slice, ...], slice2: tuple[slice, ...]
+) -> tuple[slice, ...]:
+    """Merge two slices."""
+    merged = []
+    for s1, s2 in zip(slice1, slice2, strict=True):
+        start = min(s1.start, s2.start)
+        stop = max(s1.stop, s2.stop)
+        merged.append(slice(start, stop))
+    return tuple(merged)
+
+
+@delayed
+def _collect_slices(
+    local_slices: list[dict[int, tuple[slice, ...]]],
+) -> dict[int, tuple[slice, ...]]:
+    """Collect the slices from the delayed results."""
+    global_slices = {}
+    for result in local_slices:
+        for label, s in result.items():
+            if label in global_slices:
+                global_slices[label] = _merge_slices(global_slices[label], s)
+            else:
+                global_slices[label] = s
+    return global_slices
+
+
+def compute_slices(segmentation: np.ndarray) -> dict[int, tuple[slice, ...]]:
+    """Compute slices for each label in a segmentation.
+
+    Args:
+        segmentation (ndarray): The segmentation array.
+
+    Returns:
+        dict[int, tuple[slice, ...]]: A dictionary with the label as key
+            and the slice as value.
+    """
+    slices = ndi.find_objects(segmentation)
+    slices_dict = {}
+    for label, s in enumerate(slices, start=1):
+        if s is None:
+            continue
+        else:
+            slices_dict[label] = s
+    return slices_dict
+
+
+def lazy_compute_slices(segmentation: da.Array) -> dict[int, tuple[slice, ...]]:
+    """Compute slices for each label in a segmentation."""
+    global_offsets = _compute_offsets(segmentation.chunks)
+    delayed_chunks = segmentation.to_delayed()  # type: ignore
+
+    grid_shape = tuple(len(c) for c in segmentation.chunks)
+
+    grid_indices = list(itertools.product(*[range(n) for n in grid_shape]))
+    delayed_results = []
+    for idx, chunk in zip(grid_indices, np.ravel(delayed_chunks), strict=True):
+        offset = tuple(global_offsets[dim][idx[dim]] for dim in range(len(idx)))
+        delayed_result = _process_chunk(chunk, offset)
+        delayed_results.append(delayed_result)
+
+    return _collect_slices(delayed_results).compute()
+
+
+def compute_masking_roi(
+    segmentation: np.ndarray | da.Array,
+    pixel_size: PixelSize,
+    axes_order: Sequence[str],
+) -> list[Roi]:
+    """Compute a ROI for each label in a segmentation.
+
+    This function expects a 2D, 3D, or 4D segmentation array and an
+    axes_order whose length matches the number of dimensions
+    (e.g. "zyx" or "yx").
+
+    """
+    if segmentation.ndim not in [2, 3, 4]:
+        raise NgioValueError("Only 2D, 3D, and 4D segmentations are supported.")
+
+    if len(axes_order) != segmentation.ndim:
+        raise NgioValueError(
+            "The length of axes_order must match the number of dimensions "
+            "of the segmentation."
+        )
+
+    if isinstance(segmentation, da.Array):
+        slices = lazy_compute_slices(segmentation)
+    else:
+        slices = compute_slices(segmentation)
+
+    rois = []
+    for label, slice_ in slices.items():
+        assert len(slice_) == len(axes_order)
+        roi_slices = dict(zip(axes_order, slice_, strict=True))
+        roi = Roi.from_values(
+            name=str(label), slices=roi_slices, label=label, space="pixel"
+        )
+        roi = roi.to_world(pixel_size=pixel_size)
+        rois.append(roi)
+    return rois
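To make the chunked bounding-box logic above concrete, here is an eager, dask-free re-enactment of what lazy_compute_slices does: run ndi.find_objects per chunk, shift the local slices by the chunk offsets (as _adjust_slices does), and merge boxes of the same label across chunks (as _merge_slices does). The array and chunk layout below are made up for illustration only and are not part of the package.

# Eager re-enactment of the chunked bounding-box logic (numpy + scipy only).
import numpy as np
import scipy.ndimage as ndi

seg = np.zeros((4, 8), dtype=np.uint8)
seg[1:3, 2:7] = 1  # one label straddling the column-chunk boundary at x=4

# Cumulative chunk offsets for chunks ((4,), (4, 4)), as _compute_offsets builds them.
offsets = [[0, 4], [0, 4, 8]]
merged: dict[int, tuple[slice, ...]] = {}
for j, col_off in enumerate(offsets[1][:-1]):
    block = seg[:, col_off : offsets[1][j + 1]]
    for label, s in enumerate(ndi.find_objects(block), start=1):
        if s is None:
            continue
        # Shift the local slices to global coordinates.
        shifted = (s[0], slice(s[1].start + col_off, s[1].stop + col_off))
        if label in merged:
            # Merge across chunks: min start and max stop per axis.
            shifted = tuple(
                slice(min(a.start, b.start), max(a.stop, b.stop))
                for a, b in zip(merged[label], shifted)
            )
        merged[label] = shifted

print(merged)  # {1: (slice(1, 3, None), slice(2, 7, None))}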