ngio 0.3.4__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ngio/__init__.py +7 -2
- ngio/common/__init__.py +5 -52
- ngio/common/_dimensions.py +270 -55
- ngio/common/_masking_roi.py +38 -10
- ngio/common/_pyramid.py +51 -30
- ngio/common/_roi.py +269 -82
- ngio/common/_synt_images_utils.py +101 -0
- ngio/common/_zoom.py +49 -19
- ngio/experimental/__init__.py +5 -0
- ngio/experimental/iterators/__init__.py +15 -0
- ngio/experimental/iterators/_abstract_iterator.py +390 -0
- ngio/experimental/iterators/_feature.py +189 -0
- ngio/experimental/iterators/_image_processing.py +130 -0
- ngio/experimental/iterators/_mappers.py +48 -0
- ngio/experimental/iterators/_rois_utils.py +127 -0
- ngio/experimental/iterators/_segmentation.py +235 -0
- ngio/hcs/_plate.py +41 -36
- ngio/images/__init__.py +22 -1
- ngio/images/_abstract_image.py +403 -176
- ngio/images/_create.py +31 -15
- ngio/images/_create_synt_container.py +138 -0
- ngio/images/_image.py +452 -63
- ngio/images/_label.py +56 -30
- ngio/images/_masked_image.py +387 -129
- ngio/images/_ome_zarr_container.py +237 -67
- ngio/{common → images}/_table_ops.py +41 -41
- ngio/io_pipes/__init__.py +75 -0
- ngio/io_pipes/_io_pipes.py +361 -0
- ngio/io_pipes/_io_pipes_masked.py +488 -0
- ngio/io_pipes/_io_pipes_roi.py +152 -0
- ngio/io_pipes/_io_pipes_types.py +56 -0
- ngio/io_pipes/_match_shape.py +376 -0
- ngio/io_pipes/_ops_axes.py +344 -0
- ngio/io_pipes/_ops_slices.py +446 -0
- ngio/io_pipes/_ops_slices_utils.py +196 -0
- ngio/io_pipes/_ops_transforms.py +104 -0
- ngio/io_pipes/_zoom_transform.py +175 -0
- ngio/ome_zarr_meta/__init__.py +4 -2
- ngio/ome_zarr_meta/ngio_specs/__init__.py +4 -10
- ngio/ome_zarr_meta/ngio_specs/_axes.py +186 -175
- ngio/ome_zarr_meta/ngio_specs/_channels.py +55 -18
- ngio/ome_zarr_meta/ngio_specs/_dataset.py +48 -122
- ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py +6 -15
- ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +38 -87
- ngio/ome_zarr_meta/ngio_specs/_pixel_size.py +17 -1
- ngio/ome_zarr_meta/v04/_v04_spec_utils.py +34 -31
- ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/mask.png +0 -0
- ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/nuclei.png +0 -0
- ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/raw.jpg +0 -0
- ngio/resources/__init__.py +55 -0
- ngio/resources/resource_model.py +36 -0
- ngio/tables/backends/_abstract_backend.py +5 -6
- ngio/tables/backends/_anndata.py +1 -2
- ngio/tables/backends/_anndata_utils.py +3 -3
- ngio/tables/backends/_non_zarr_backends.py +1 -1
- ngio/tables/backends/_table_backends.py +0 -1
- ngio/tables/backends/_utils.py +3 -3
- ngio/tables/v1/_roi_table.py +165 -70
- ngio/transforms/__init__.py +5 -0
- ngio/transforms/_zoom.py +19 -0
- ngio/utils/__init__.py +2 -3
- ngio/utils/_datasets.py +5 -0
- ngio/utils/_logger.py +19 -0
- ngio/utils/_zarr_utils.py +6 -6
- {ngio-0.3.4.dist-info → ngio-0.4.0.dist-info}/METADATA +24 -22
- ngio-0.4.0.dist-info/RECORD +85 -0
- ngio/common/_array_pipe.py +0 -288
- ngio/common/_axes_transforms.py +0 -64
- ngio/common/_common_types.py +0 -5
- ngio/common/_slicer.py +0 -96
- ngio-0.3.4.dist-info/RECORD +0 -61
- {ngio-0.3.4.dist-info → ngio-0.4.0.dist-info}/WHEEL +0 -0
- {ngio-0.3.4.dist-info → ngio-0.4.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
from math import ceil
|
|
2
|
+
|
|
3
|
+
import numpy as np
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def _center_crop(arr: np.ndarray, target: int, axis: int) -> np.ndarray:
|
|
7
|
+
# Center-crop the array `arr` along dimension `axis` to size `target`.
|
|
8
|
+
# This assumes target < arr.shape[axis].
|
|
9
|
+
n = arr.shape[axis]
|
|
10
|
+
start = (n - target) // 2
|
|
11
|
+
end = start + target
|
|
12
|
+
slc = [slice(None)] * arr.ndim
|
|
13
|
+
slc[axis] = slice(start, end)
|
|
14
|
+
return arr[tuple(slc)]
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _tile_to(
|
|
18
|
+
arr: np.ndarray, target: int, axis: int, label_mode: bool = False
|
|
19
|
+
) -> np.ndarray:
|
|
20
|
+
# Tile the array `arr` along dimension `axis` to size `target`.
|
|
21
|
+
# This assumes target > arr.shape[axis].
|
|
22
|
+
n = arr.shape[axis]
|
|
23
|
+
reps = ceil(target / n)
|
|
24
|
+
|
|
25
|
+
tiles = []
|
|
26
|
+
flip = False
|
|
27
|
+
max_label = 0
|
|
28
|
+
for _ in range(reps):
|
|
29
|
+
if flip:
|
|
30
|
+
t_arr = np.flip(arr, axis=axis)
|
|
31
|
+
else:
|
|
32
|
+
t_arr = 1 * arr
|
|
33
|
+
if label_mode:
|
|
34
|
+
# Remove duplicate labels
|
|
35
|
+
t_arr = np.where(t_arr > 0, t_arr + max_label, 0)
|
|
36
|
+
max_label = t_arr.max()
|
|
37
|
+
tiles.append(t_arr)
|
|
38
|
+
flip = not flip
|
|
39
|
+
|
|
40
|
+
tiled = np.concatenate(tiles, axis=axis)
|
|
41
|
+
|
|
42
|
+
slc = [slice(None)] * arr.ndim
|
|
43
|
+
slc[axis] = slice(0, target)
|
|
44
|
+
return tiled[tuple(slc)]
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _fit_to_shape_2d(
    src: np.ndarray, out_shape: tuple[int, int], label_mode: bool = False
) -> np.ndarray:
    """Fit a 2D array to a target shape by center-cropping or tiling as necessary.

    Each axis is handled independently: cropped when the target is smaller,
    tiled otherwise (tiling with an equal size is a plain copy).
    """
    result = src
    for axis, size in enumerate(out_shape):
        if size < result.shape[axis]:
            result = _center_crop(result, size, axis=axis)
        else:
            result = _tile_to(result, size, axis=axis, label_mode=label_mode)
    return result
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def fit_to_shape(
    arr: np.ndarray, out_shape: tuple[int, ...], ensure_unique_info: bool = False
) -> np.ndarray:
    """Fit a 2D array to a target shape.

    The x,y dimensions of `arr` are fitted to the last two dimensions of
    `out_shape` by center-cropping or tiling as necessary.
    The other dimensions are broadcasted as necessary.

    WARNING: This does not zoom the image, it only crops or tiles it.

    Args:
        arr (np.ndarray): The input 2D array.
        out_shape (tuple[int, ...]): The target shape. Must have at least 2
            and at most 5 dimensions.
        ensure_unique_info (bool, optional): If True, assumes that `arr` is a label
            image and ensures that labels do not overlap when tiling. Defaults to False.

    Returns:
        np.ndarray: The fitted array with shape `out_shape`.
    """
    ndim_out = len(out_shape)
    if ndim_out < 2:
        raise ValueError("`out_shape` must contain at least 2 dimensions.")

    if ndim_out > 5:
        raise ValueError("`out_shape` must contain at most 5 dimensions.")

    if min(out_shape) <= 0:
        raise ValueError("`out_shape` must contain positive integers.")

    if arr.ndim != 2:
        raise ValueError("`arr` must be a 2D array.")

    target_yx = (out_shape[-2], out_shape[-1])
    fitted = _fit_to_shape_2d(arr, out_shape=target_yx, label_mode=ensure_unique_info)
    # NOTE(review): np.broadcast_to returns a read-only view — callers that
    # need a writable array must copy; confirm this is intended downstream.
    return np.broadcast_to(fitted, out_shape)
|
ngio/common/_zoom.py
CHANGED
|
@@ -7,17 +7,29 @@ from scipy.ndimage import zoom as scipy_zoom
|
|
|
7
7
|
|
|
8
8
|
from ngio.utils import NgioValueError
|
|
9
9
|
|
|
10
|
+
InterpolationOrder = Literal["nearest", "linear", "cubic"]
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def order_to_int(order: InterpolationOrder | Literal[0, 1, 2]) -> Literal[0, 1, 2]:
    """Normalize an interpolation order to its scipy integer code.

    Accepts either a string alias ("nearest", "linear", "cubic") or the
    corresponding integer order (0, 1, 2) and returns the integer form.

    Raises:
        NgioValueError: If `order` is neither a recognized name nor integer.
    """
    if order in ("nearest", 0):
        return 0
    if order in ("linear", 1):
        return 1
    if order in ("cubic", 2):
        return 2
    raise NgioValueError(f"Invalid order: {order}")
|
|
22
|
+
|
|
10
23
|
|
|
11
24
|
def _stacked_zoom(x, zoom_y, zoom_x, order=1, mode="grid-constant", grid_mode=True):
|
|
12
25
|
*rest, yshape, xshape = x.shape
|
|
13
26
|
x = x.reshape(-1, yshape, xshape)
|
|
14
27
|
scale_xy = (zoom_y, zoom_x)
|
|
15
|
-
|
|
16
|
-
[
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
)
|
|
28
|
+
_x_out = [
|
|
29
|
+
scipy_zoom(x[i], scale_xy, order=order, mode=mode, grid_mode=grid_mode)
|
|
30
|
+
for i in range(x.shape[0])
|
|
31
|
+
]
|
|
32
|
+
x_out = np.stack(_x_out) # type: ignore (scipy_zoom returns np.ndarray, but type is not inferred correctly)
|
|
21
33
|
return x_out.reshape(*rest, *x_out.shape[1:])
|
|
22
34
|
|
|
23
35
|
|
|
@@ -33,6 +45,10 @@ def fast_zoom(x, zoom, order=1, mode="grid-constant", grid_mode=True, auto_stack
|
|
|
33
45
|
it stacks the first dimensions to call zoom only on the last two.
|
|
34
46
|
"""
|
|
35
47
|
mask = np.isclose(x.shape, 1)
|
|
48
|
+
# Always keep the last two dimensions
|
|
49
|
+
# To avoid issues with singleton x or y dimensions
|
|
50
|
+
mask[-1] = False
|
|
51
|
+
mask[-2] = False
|
|
36
52
|
zoom = np.array(zoom)
|
|
37
53
|
singletons = tuple(np.where(mask)[0])
|
|
38
54
|
xs = np.squeeze(x, axis=singletons)
|
|
@@ -45,13 +61,13 @@ def fast_zoom(x, zoom, order=1, mode="grid-constant", grid_mode=True, auto_stack
|
|
|
45
61
|
)
|
|
46
62
|
else:
|
|
47
63
|
xs = scipy_zoom(xs, new_zoom, order=order, mode=mode, grid_mode=grid_mode)
|
|
48
|
-
x = np.expand_dims(xs, axis=singletons)
|
|
64
|
+
x = np.expand_dims(xs, axis=singletons) # type: ignore (scipy_zoom returns np.ndarray, but type is not inferred correctly)
|
|
49
65
|
return x
|
|
50
66
|
|
|
51
67
|
|
|
52
68
|
def _zoom_inputs_check(
|
|
53
69
|
source_array: np.ndarray | da.Array,
|
|
54
|
-
scale: tuple[int, ...] | None = None,
|
|
70
|
+
scale: tuple[int | float, ...] | None = None,
|
|
55
71
|
target_shape: tuple[int, ...] | None = None,
|
|
56
72
|
) -> tuple[np.ndarray, tuple[int, ...]]:
|
|
57
73
|
if scale is None and target_shape is None:
|
|
@@ -72,16 +88,22 @@ def _zoom_inputs_check(
|
|
|
72
88
|
_target_shape = target_shape
|
|
73
89
|
else:
|
|
74
90
|
_scale = np.array(scale)
|
|
75
|
-
_target_shape = tuple(np.array(source_array.shape) * scale)
|
|
91
|
+
_target_shape = tuple(map(int, np.round(np.array(source_array.shape) * scale)))
|
|
92
|
+
|
|
93
|
+
if len(_scale) != source_array.ndim:
|
|
94
|
+
raise NgioValueError(
|
|
95
|
+
f"Cannot scale array of shape {source_array.shape} with factors {_scale}."
|
|
96
|
+
" Target shape must have the same number of dimensions as the source array."
|
|
97
|
+
)
|
|
76
98
|
|
|
77
99
|
return _scale, _target_shape
|
|
78
100
|
|
|
79
101
|
|
|
80
102
|
def dask_zoom(
|
|
81
103
|
source_array: da.Array,
|
|
82
|
-
scale: tuple[int, ...] | None = None,
|
|
104
|
+
scale: tuple[float | int, ...] | None = None,
|
|
83
105
|
target_shape: tuple[int, ...] | None = None,
|
|
84
|
-
order:
|
|
106
|
+
order: InterpolationOrder = "linear",
|
|
85
107
|
) -> da.Array:
|
|
86
108
|
"""Dask implementation of zooming an array.
|
|
87
109
|
|
|
@@ -91,7 +113,8 @@ def dask_zoom(
|
|
|
91
113
|
source_array (da.Array): The source array to zoom.
|
|
92
114
|
scale (tuple[int, ...] | None): The scale factor to zoom by.
|
|
93
115
|
target_shape (tuple[int, ...], None): The target shape to zoom to.
|
|
94
|
-
order (Literal[
|
|
116
|
+
order (Literal["nearest", "linear", "cubic"]): The order of interpolation.
|
|
117
|
+
Defaults to "linear".
|
|
95
118
|
|
|
96
119
|
Returns:
|
|
97
120
|
da.Array: The zoomed array.
|
|
@@ -100,22 +123,25 @@ def dask_zoom(
|
|
|
100
123
|
# https://github.com/ome/ome-zarr-py/blob/master/ome_zarr/dask_utils.py
|
|
101
124
|
# The module was contributed by Andreas Eisenbarth @aeisenbarth
|
|
102
125
|
# See https://github.com/toloudis/ome-zarr-py/pull/
|
|
103
|
-
|
|
104
126
|
_scale, _target_shape = _zoom_inputs_check(
|
|
105
127
|
source_array=source_array, scale=scale, target_shape=target_shape
|
|
106
128
|
)
|
|
107
129
|
|
|
108
130
|
# Rechunk to better match the scaling operation
|
|
109
|
-
source_chunks = np.array(source_array.chunksize)
|
|
131
|
+
source_chunks = np.array(source_array.chunksize) # type: ignore (da.Array.chunksize is a tuple of ints)
|
|
110
132
|
better_source_chunks = np.maximum(1, np.round(source_chunks * _scale) / _scale)
|
|
111
133
|
better_source_chunks = better_source_chunks.astype(int)
|
|
112
|
-
source_array = source_array.rechunk(better_source_chunks) # type: ignore
|
|
134
|
+
source_array = source_array.rechunk(better_source_chunks) # type: ignore (better_source_chunks is a valid input for rechunk)
|
|
113
135
|
|
|
114
136
|
# Calculate the block output shape
|
|
115
137
|
block_output_shape = tuple(np.ceil(better_source_chunks * _scale).astype(int))
|
|
116
138
|
|
|
117
139
|
zoom_wrapper = partial(
|
|
118
|
-
fast_zoom,
|
|
140
|
+
fast_zoom,
|
|
141
|
+
zoom=_scale,
|
|
142
|
+
order=order_to_int(order),
|
|
143
|
+
mode="grid-constant",
|
|
144
|
+
grid_mode=True,
|
|
119
145
|
)
|
|
120
146
|
|
|
121
147
|
out_array = da.map_blocks(
|
|
@@ -130,9 +156,9 @@ def dask_zoom(
|
|
|
130
156
|
|
|
131
157
|
def numpy_zoom(
|
|
132
158
|
source_array: np.ndarray,
|
|
133
|
-
scale: tuple[int, ...] | None = None,
|
|
159
|
+
scale: tuple[int | float, ...] | None = None,
|
|
134
160
|
target_shape: tuple[int, ...] | None = None,
|
|
135
|
-
order:
|
|
161
|
+
order: InterpolationOrder = "linear",
|
|
136
162
|
) -> np.ndarray:
|
|
137
163
|
"""Numpy implementation of zooming an array.
|
|
138
164
|
|
|
@@ -152,7 +178,11 @@ def numpy_zoom(
|
|
|
152
178
|
)
|
|
153
179
|
|
|
154
180
|
out_array = fast_zoom(
|
|
155
|
-
source_array,
|
|
181
|
+
source_array,
|
|
182
|
+
zoom=_scale,
|
|
183
|
+
order=order_to_int(order),
|
|
184
|
+
mode="grid-constant",
|
|
185
|
+
grid_mode=True,
|
|
156
186
|
)
|
|
157
187
|
assert isinstance(out_array, np.ndarray)
|
|
158
188
|
return out_array
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
"""This file is part of NGIO, a library for working with OME-Zarr data."""
|
|
2
|
+
|
|
3
|
+
from ngio.experimental.iterators._feature import FeatureExtractorIterator
|
|
4
|
+
from ngio.experimental.iterators._image_processing import ImageProcessingIterator
|
|
5
|
+
from ngio.experimental.iterators._segmentation import (
|
|
6
|
+
MaskedSegmentationIterator,
|
|
7
|
+
SegmentationIterator,
|
|
8
|
+
)
|
|
9
|
+
|
|
10
|
+
__all__ = [
|
|
11
|
+
"FeatureExtractorIterator",
|
|
12
|
+
"ImageProcessingIterator",
|
|
13
|
+
"MaskedSegmentationIterator",
|
|
14
|
+
"SegmentationIterator",
|
|
15
|
+
]
|
|
@@ -0,0 +1,390 @@
|
|
|
1
|
+
from abc import ABC, abstractmethod
|
|
2
|
+
from collections.abc import Callable, Generator
|
|
3
|
+
from typing import Generic, Literal, Self, TypeVar, overload
|
|
4
|
+
|
|
5
|
+
from ngio import Roi
|
|
6
|
+
from ngio.experimental.iterators._mappers import BasicMapper, MapperProtocol
|
|
7
|
+
from ngio.experimental.iterators._rois_utils import (
|
|
8
|
+
by_chunks,
|
|
9
|
+
by_yx,
|
|
10
|
+
by_zyx,
|
|
11
|
+
grid,
|
|
12
|
+
rois_product,
|
|
13
|
+
)
|
|
14
|
+
from ngio.images._abstract_image import AbstractImage
|
|
15
|
+
from ngio.io_pipes._io_pipes_types import DataGetterProtocol, DataSetterProtocol
|
|
16
|
+
from ngio.io_pipes._ops_slices_utils import check_if_regions_overlap
|
|
17
|
+
from ngio.tables import GenericRoiTable
|
|
18
|
+
from ngio.utils import NgioValueError
|
|
19
|
+
|
|
20
|
+
NumpyPipeType = TypeVar("NumpyPipeType")
|
|
21
|
+
DaskPipeType = TypeVar("DaskPipeType")
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class AbstractIteratorBuilder(ABC, Generic[NumpyPipeType, DaskPipeType]):
    """Base class for building iterators over ROIs.

    Subclasses provide ROI-specific numpy/dask getter and setter factories;
    this base class supplies the ROI-set transformations (grid, by_yx,
    by_zyx, by_chunks, product), iteration/mapping drivers, and overlap
    checks shared by all concrete iterators.
    """

    # The current set of ROIs this iterator will visit.
    _rois: list[Roi]
    # Image whose dimensions/chunks define the iteration space.
    _ref_image: AbstractImage

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(regions={len(self._rois)})"

    @abstractmethod
    def get_init_kwargs(self) -> dict:
        """Return the initialization arguments for the iterator.

        This is used to clone the iterator with the same parameters
        after every "product" operation.
        """
        pass

    @property
    def rois(self) -> list[Roi]:
        """Get the list of ROIs for the iterator."""
        return self._rois

    def _set_rois(self, rois: list[Roi]) -> None:
        """Set the list of ROIs for the iterator."""
        self._rois = rois

    @property
    def ref_image(self) -> AbstractImage:
        """Get the reference image for the iterator."""
        return self._ref_image

    def _new_from_rois(self, rois: list[Roi]) -> Self:
        """Create a new instance of the iterator with a different set of ROIs."""
        # Clone with the subclass's own init kwargs, then swap in the ROIs.
        init_kwargs = self.get_init_kwargs()
        new_instance = self.__class__(**init_kwargs)
        new_instance._set_rois(rois)
        return new_instance

    def grid(
        self,
        size_x: int | None = None,
        size_y: int | None = None,
        size_z: int | None = None,
        size_t: int | None = None,
        stride_x: int | None = None,
        stride_y: int | None = None,
        stride_z: int | None = None,
        stride_t: int | None = None,
        base_name: str = "",
    ) -> Self:
        """Create a grid of ROIs based on the input image dimensions."""
        # `grid` below is the module-level helper from _rois_utils; the
        # method name does not shadow it inside this body.
        rois = grid(
            rois=self.rois,
            ref_image=self.ref_image,
            size_x=size_x,
            size_y=size_y,
            size_z=size_z,
            size_t=size_t,
            stride_x=stride_x,
            stride_y=stride_y,
            stride_z=stride_z,
            stride_t=stride_t,
            base_name=base_name,
        )
        return self._new_from_rois(rois)

    def by_yx(self) -> Self:
        """Return a new iterator that iterates over ROIs by YX coordinates."""
        rois = by_yx(self.rois, self.ref_image)
        return self._new_from_rois(rois)

    def by_zyx(self, strict: bool = True) -> Self:
        """Return a new iterator that iterates over ROIs by ZYX coordinates.

        Args:
            strict (bool): If True, only iterate over ZYX if a Z axis
                is present and not of size 1.

        """
        rois = by_zyx(self.rois, self.ref_image, strict=strict)
        return self._new_from_rois(rois)

    def by_chunks(self, overlap_xy: int = 0, overlap_z: int = 0) -> Self:
        """Return a new iterator that iterates over ROIs by chunks.

        Args:
            overlap_xy (int): Overlap in XY dimensions.
            overlap_z (int): Overlap in Z dimension.

        Returns:
            SegmentationIterator: A new iterator with chunked ROIs.
        """
        rois = by_chunks(
            self.rois, self.ref_image, overlap_xy=overlap_xy, overlap_z=overlap_z
        )
        return self._new_from_rois(rois)

    def product(self, other: list[Roi] | GenericRoiTable) -> Self:
        """Cartesian product of the current ROIs with an arbitrary list of ROIs."""
        if isinstance(other, GenericRoiTable):
            other = other.rois()
        rois = rois_product(self.rois, other)
        return self._new_from_rois(rois)

    @abstractmethod
    def build_numpy_getter(self, roi: Roi) -> DataGetterProtocol[NumpyPipeType]:
        """Build a getter function for the given ROI."""
        raise NotImplementedError

    @abstractmethod
    def build_numpy_setter(self, roi: Roi) -> DataSetterProtocol[NumpyPipeType] | None:
        """Build a setter function for the given ROI.

        May return None for read-only iterators.
        """
        raise NotImplementedError

    @abstractmethod
    def build_dask_getter(self, roi: Roi) -> DataGetterProtocol[DaskPipeType]:
        """Build a Dask reader function for the given ROI."""
        raise NotImplementedError

    @abstractmethod
    def build_dask_setter(self, roi: Roi) -> DataSetterProtocol[DaskPipeType] | None:
        """Build a Dask setter function for the given ROI.

        May return None for read-only iterators.
        """
        raise NotImplementedError

    @abstractmethod
    def post_consolidate(self) -> None:
        """Post-process the consolidated data."""
        raise NotImplementedError

    def _numpy_getters_generator(self) -> Generator[DataGetterProtocol[NumpyPipeType]]:
        """Yield a numpy getter for every ROI, in ROI order."""
        yield from (self.build_numpy_getter(roi) for roi in self.rois)

    def _dask_getters_generator(self) -> Generator[DataGetterProtocol[DaskPipeType]]:
        """Yield a dask getter for every ROI, in ROI order."""
        yield from (self.build_dask_getter(roi) for roi in self.rois)

    def _numpy_setters_generator(
        self,
    ) -> Generator[DataSetterProtocol[NumpyPipeType] | None]:
        """Yield a numpy setter (or None if read-only) for every ROI, in ROI order."""
        yield from (self.build_numpy_setter(roi) for roi in self.rois)

    def _dask_setters_generator(
        self,
    ) -> Generator[DataSetterProtocol[DaskPipeType] | None]:
        """Yield a dask setter (or None if read-only) for every ROI, in ROI order."""
        yield from (self.build_dask_setter(roi) for roi in self.rois)

    def _read_and_write_generator(
        self,
        getters: Generator[
            DataGetterProtocol[NumpyPipeType] | DataGetterProtocol[DaskPipeType]
        ],
        setters: Generator[
            DataSetterProtocol[NumpyPipeType] | DataSetterProtocol[DaskPipeType] | None
        ],
    ) -> Generator[
        tuple[
            DataGetterProtocol[NumpyPipeType] | DataGetterProtocol[DaskPipeType],
            DataSetterProtocol[NumpyPipeType] | DataSetterProtocol[DaskPipeType],
        ]
    ]:
        """Pair each getter with its setter, raising if any setter is missing.

        Raises:
            NgioValueError: If any ROI has no setter (read-only iterator).
        """
        for getter, setter in zip(getters, setters, strict=True):
            if setter is None:
                name = self.__class__.__name__
                raise NgioValueError(f"Iterator is read-only: {name}")
            yield getter, setter
        # NOTE: runs only if the consumer exhausts the generator completely.
        self.post_consolidate()

    @overload
    def iter(
        self,
        lazy: Literal[True],
        data_mode: Literal["numpy"],
        iterator_mode: Literal["readwrite"],
    ) -> Generator[
        tuple[DataGetterProtocol[NumpyPipeType], DataSetterProtocol[NumpyPipeType]]
    ]: ...

    @overload
    def iter(
        self,
        lazy: Literal[True],
        data_mode: Literal["numpy"],
        iterator_mode: Literal["readonly"] = ...,
    ) -> Generator[DataGetterProtocol[NumpyPipeType]]: ...

    @overload
    def iter(
        self,
        lazy: Literal[True],
        data_mode: Literal["dask"],
        iterator_mode: Literal["readwrite"],
    ) -> Generator[
        tuple[DataGetterProtocol[DaskPipeType], DataSetterProtocol[DaskPipeType]]
    ]: ...

    @overload
    def iter(
        self,
        lazy: Literal[True],
        data_mode: Literal["dask"],
        iterator_mode: Literal["readonly"] = ...,
    ) -> Generator[DataGetterProtocol[DaskPipeType]]: ...

    @overload
    def iter(
        self,
        lazy: Literal[False],
        data_mode: Literal["numpy"],
        iterator_mode: Literal["readwrite"],
    ) -> Generator[tuple[NumpyPipeType, DataSetterProtocol[NumpyPipeType]]]: ...

    @overload
    def iter(
        self,
        lazy: Literal[False],
        data_mode: Literal["numpy"],
        iterator_mode: Literal["readonly"] = ...,
    ) -> Generator[NumpyPipeType]: ...

    @overload
    def iter(
        self,
        lazy: Literal[False],
        data_mode: Literal["dask"],
        iterator_mode: Literal["readwrite"],
    ) -> Generator[tuple[DaskPipeType, DataSetterProtocol[DaskPipeType]]]: ...

    @overload
    def iter(
        self,
        lazy: Literal[False],
        data_mode: Literal["dask"],
        iterator_mode: Literal["readonly"] = ...,
    ) -> Generator[DaskPipeType]: ...

    def iter(
        self,
        lazy: bool = False,
        data_mode: Literal["numpy", "dask"] = "dask",
        iterator_mode: Literal["readwrite", "readonly"] = "readwrite",
    ) -> Generator:
        """Create an iterator over the pixels of the ROIs.

        Args:
            lazy: If True, yield getter callables (read) instead of eagerly
                calling them to load the data.
            data_mode: Whether getters/setters operate on numpy or dask data.
            iterator_mode: "readonly" yields only getters/data; "readwrite"
                yields (data-or-getter, setter) pairs so results can be
                written back.

        Raises:
            NgioValueError: If `data_mode` is not "numpy" or "dask", or (on
                consumption, in "readwrite" mode) if the iterator is read-only.
        """
        if data_mode == "numpy":
            getters = self._numpy_getters_generator()
            setters = self._numpy_setters_generator()
        elif data_mode == "dask":
            getters = self._dask_getters_generator()
            setters = self._dask_setters_generator()
        else:
            raise NgioValueError(f"Invalid mode: {data_mode}")

        if iterator_mode == "readonly":
            if lazy:
                return getters
            else:
                # Eager: call each getter so the consumer receives data.
                return (getter() for getter in getters)
        if lazy:
            return self._read_and_write_generator(getters, setters)
        else:
            gen = self._read_and_write_generator(getters, setters)
            return ((getter(), setter) for getter, setter in gen)

    def iter_as_numpy(
        self,
    ):
        """Create an eager read/write iterator yielding (numpy data, setter) pairs."""
        return self.iter(lazy=False, data_mode="numpy", iterator_mode="readwrite")

    def iter_as_dask(
        self,
    ):
        """Create an eager read/write iterator yielding (dask data, setter) pairs."""
        return self.iter(lazy=False, data_mode="dask", iterator_mode="readwrite")

    def map_as_numpy(
        self,
        func: Callable[[NumpyPipeType], NumpyPipeType],
        mapper: MapperProtocol[NumpyPipeType] | None = None,
    ) -> None:
        """Apply a transformation function to the ROI pixels.

        Args:
            func: Transformation applied to each ROI's numpy data; its
                result is written back via the ROI's setter.
            mapper: Execution strategy; defaults to a plain sequential
                BasicMapper when None.
        """
        if mapper is None:
            _mapper = BasicMapper[NumpyPipeType]()
        else:
            _mapper = mapper

        _mapper(
            func=func,
            getters=self._numpy_getters_generator(),
            setters=self._numpy_setters_generator(),
        )
        self.post_consolidate()

    def map_as_dask(
        self,
        func: Callable[[DaskPipeType], DaskPipeType],
        mapper: MapperProtocol[DaskPipeType] | None = None,
    ) -> None:
        """Apply a transformation function to the ROI pixels.

        Args:
            func: Transformation applied to each ROI's dask data; its
                result is written back via the ROI's setter.
            mapper: Execution strategy; defaults to a plain sequential
                BasicMapper when None.
        """
        if mapper is None:
            _mapper = BasicMapper[DaskPipeType]()
        else:
            _mapper = mapper

        _mapper(
            func=func,
            getters=self._dask_getters_generator(),
            setters=self._dask_setters_generator(),
        )
        self.post_consolidate()

    def check_if_regions_overlap(self) -> bool:
        """Check if any of the ROIs overlap logically.

        If two ROIs cover the same pixel, they are considered to overlap.
        This does not consider chunking or other storage details.

        Returns:
            bool: True if any ROIs overlap. False otherwise.
        """
        if len(self.rois) < 2:
            # Less than 2 ROIs cannot overlap
            return False

        slicing_tuples = (
            g.slicing_ops.normalized_slicing_tuple
            for g in self._numpy_getters_generator()
        )
        # Bare name resolves to the module-level helper imported at the top
        # of the file, not this method (no shadowing inside the body).
        x = check_if_regions_overlap(slicing_tuples)
        return x

    def require_no_regions_overlap(self) -> None:
        """Ensure that the Iterator's ROIs do not overlap."""
        if self.check_if_regions_overlap():
            raise NgioValueError("Some rois overlap.")

    def check_if_chunks_overlap(self) -> bool:
        """Check if any of the ROIs overlap in terms of chunks.

        If two ROIs cover the same chunk, they are considered to overlap in chunks.
        This does not consider pixel-level overlaps.

        Returns:
            bool: True if any ROIs overlap in chunks, False otherwise.
        """
        # Local import: this helper is not imported at module level, and the
        # bare name would otherwise collide with this method's own name.
        from ngio.io_pipes._ops_slices_utils import check_if_chunks_overlap

        if len(self.rois) < 2:
            # Less than 2 ROIs cannot overlap
            return False

        slicing_tuples = (
            g.slicing_ops.normalized_slicing_tuple
            for g in self._numpy_getters_generator()
        )
        shape = self.ref_image.shape
        chunks = self.ref_image.chunks
        return check_if_chunks_overlap(slicing_tuples, shape, chunks)

    def require_no_chunks_overlap(self) -> None:
        """Ensure that the ROIs do not overlap in terms of chunks."""
        if self.check_if_chunks_overlap():
            raise NgioValueError("Some rois overlap in chunks.")
|