ngio 0.4.0a3__py3-none-any.whl → 0.4.0a4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ngio/__init__.py +1 -2
- ngio/common/__init__.py +2 -51
- ngio/common/_dimensions.py +223 -64
- ngio/common/_pyramid.py +42 -23
- ngio/common/_roi.py +47 -411
- ngio/common/_zoom.py +32 -7
- ngio/experimental/iterators/_abstract_iterator.py +2 -2
- ngio/experimental/iterators/_feature.py +9 -14
- ngio/experimental/iterators/_image_processing.py +17 -27
- ngio/experimental/iterators/_rois_utils.py +4 -4
- ngio/experimental/iterators/_segmentation.py +37 -53
- ngio/images/_abstract_image.py +135 -93
- ngio/images/_create.py +16 -0
- ngio/images/_create_synt_container.py +10 -0
- ngio/images/_image.py +33 -9
- ngio/images/_label.py +24 -3
- ngio/images/_masked_image.py +60 -81
- ngio/images/_ome_zarr_container.py +33 -0
- ngio/io_pipes/__init__.py +49 -0
- ngio/io_pipes/_io_pipes.py +286 -0
- ngio/io_pipes/_io_pipes_masked.py +481 -0
- ngio/io_pipes/_io_pipes_roi.py +143 -0
- ngio/io_pipes/_io_pipes_utils.py +299 -0
- ngio/io_pipes/_match_shape.py +376 -0
- ngio/io_pipes/_ops_axes.py +146 -0
- ngio/io_pipes/_ops_slices.py +218 -0
- ngio/io_pipes/_ops_transforms.py +104 -0
- ngio/io_pipes/_zoom_transform.py +175 -0
- ngio/ome_zarr_meta/__init__.py +6 -2
- ngio/ome_zarr_meta/ngio_specs/__init__.py +6 -4
- ngio/ome_zarr_meta/ngio_specs/_axes.py +182 -70
- ngio/ome_zarr_meta/ngio_specs/_dataset.py +47 -121
- ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +30 -22
- ngio/ome_zarr_meta/ngio_specs/_pixel_size.py +17 -1
- ngio/ome_zarr_meta/v04/_v04_spec_utils.py +33 -30
- ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/nuclei.png +0 -0
- ngio/resources/__init__.py +1 -0
- ngio/resources/resource_model.py +1 -0
- ngio/{common/transforms → transforms}/__init__.py +1 -1
- ngio/transforms/_zoom.py +19 -0
- ngio/utils/_zarr_utils.py +5 -1
- {ngio-0.4.0a3.dist-info → ngio-0.4.0a4.dist-info}/METADATA +1 -1
- ngio-0.4.0a4.dist-info/RECORD +83 -0
- ngio/common/_array_io_pipes.py +0 -554
- ngio/common/_array_io_utils.py +0 -508
- ngio/common/transforms/_label.py +0 -12
- ngio/common/transforms/_zoom.py +0 -109
- ngio-0.4.0a3.dist-info/RECORD +0 -76
- {ngio-0.4.0a3.dist-info → ngio-0.4.0a4.dist-info}/WHEEL +0 -0
- {ngio-0.4.0a3.dist-info → ngio-0.4.0a4.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
from collections.abc import Sequence
|
|
2
|
+
from typing import TypeVar
|
|
3
|
+
|
|
4
|
+
import dask.array as da
|
|
5
|
+
import numpy as np
|
|
6
|
+
|
|
7
|
+
from ngio.ome_zarr_meta.ngio_specs._axes import AxesOps
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def _apply_numpy_axes_ops(
|
|
11
|
+
array: np.ndarray,
|
|
12
|
+
squeeze_axes: tuple[int, ...] | None = None,
|
|
13
|
+
transpose_axes: tuple[int, ...] | None = None,
|
|
14
|
+
expand_axes: tuple[int, ...] | None = None,
|
|
15
|
+
) -> np.ndarray:
|
|
16
|
+
"""Apply axes operations to a numpy array."""
|
|
17
|
+
if squeeze_axes is not None:
|
|
18
|
+
array = np.squeeze(array, axis=squeeze_axes)
|
|
19
|
+
if transpose_axes is not None:
|
|
20
|
+
array = np.transpose(array, axes=transpose_axes)
|
|
21
|
+
if expand_axes is not None:
|
|
22
|
+
array = np.expand_dims(array, axis=expand_axes)
|
|
23
|
+
return array
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _apply_dask_axes_ops(
    array: da.Array,
    squeeze_axes: tuple[int, ...] | None = None,
    transpose_axes: tuple[int, ...] | None = None,
    expand_axes: tuple[int, ...] | None = None,
) -> da.Array:
    """Squeeze, transpose, and expand a dask array, in that order.

    Any operation whose axes argument is ``None`` is skipped.
    """
    result = array
    if squeeze_axes is not None:
        result = da.squeeze(result, axis=squeeze_axes)
    if transpose_axes is not None:
        result = da.transpose(result, axes=transpose_axes)
    if expand_axes is not None:
        result = da.expand_dims(result, axis=expand_axes)
    return result
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
# Generic element type for the sequence-based axes operations below.
T = TypeVar("T")
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def _apply_sequence_axes_ops(
|
|
46
|
+
input_: Sequence[T],
|
|
47
|
+
default: T,
|
|
48
|
+
squeeze_axes: tuple[int, ...] | None = None,
|
|
49
|
+
transpose_axes: tuple[int, ...] | None = None,
|
|
50
|
+
expand_axes: tuple[int, ...] | None = None,
|
|
51
|
+
) -> list[T]:
|
|
52
|
+
input_list = list(input_)
|
|
53
|
+
if squeeze_axes is not None:
|
|
54
|
+
for offset, ax in enumerate(squeeze_axes):
|
|
55
|
+
input_list.pop(ax - offset)
|
|
56
|
+
|
|
57
|
+
if transpose_axes is not None:
|
|
58
|
+
input_list = [input_list[i] for i in transpose_axes]
|
|
59
|
+
|
|
60
|
+
if expand_axes is not None:
|
|
61
|
+
for ax in expand_axes:
|
|
62
|
+
input_list.insert(ax, default)
|
|
63
|
+
|
|
64
|
+
return input_list
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def get_as_numpy_axes_ops(
    array: np.ndarray,
    axes_ops: AxesOps,
) -> np.ndarray:
    """Run the forward ("get") axes operations on a numpy array."""
    ops = {
        "squeeze_axes": axes_ops.get_squeeze_op,
        "transpose_axes": axes_ops.get_transpose_op,
        "expand_axes": axes_ops.get_expand_op,
    }
    return _apply_numpy_axes_ops(array, **ops)
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def get_as_dask_axes_ops(
    array: da.Array,
    axes_ops: AxesOps,
) -> da.Array:
    """Run the forward ("get") axes operations on a dask array."""
    ops = {
        "squeeze_axes": axes_ops.get_squeeze_op,
        "transpose_axes": axes_ops.get_transpose_op,
        "expand_axes": axes_ops.get_expand_op,
    }
    return _apply_dask_axes_ops(array, **ops)
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def get_as_sequence_axes_ops(
    input_: Sequence[T],
    axes_ops: AxesOps,
    default: T,
) -> list[T]:
    """Run the forward ("get") axes operations on a sequence."""
    ops = {
        "squeeze_axes": axes_ops.get_squeeze_op,
        "transpose_axes": axes_ops.get_transpose_op,
        "expand_axes": axes_ops.get_expand_op,
    }
    return _apply_sequence_axes_ops(input_, default=default, **ops)
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def set_as_numpy_axes_ops(
    array: np.ndarray,
    axes_ops: AxesOps,
) -> np.ndarray:
    """Run the inverse ("set") axes operations on a numpy array."""
    ops = {
        "squeeze_axes": axes_ops.set_squeeze_op,
        "transpose_axes": axes_ops.set_transpose_op,
        "expand_axes": axes_ops.set_expand_op,
    }
    return _apply_numpy_axes_ops(array, **ops)
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def set_as_dask_axes_ops(
    array: da.Array,
    axes_ops: AxesOps,
) -> da.Array:
    """Run the inverse ("set") axes operations on a dask array."""
    ops = {
        "squeeze_axes": axes_ops.set_squeeze_op,
        "transpose_axes": axes_ops.set_transpose_op,
        "expand_axes": axes_ops.set_expand_op,
    }
    return _apply_dask_axes_ops(array, **ops)
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def set_as_sequence_axes_ops(
    input_: Sequence[T],
    axes_ops: AxesOps,
    default: T,
) -> list[T]:
    """Run the inverse ("set") axes operations on a sequence."""
    ops = {
        "squeeze_axes": axes_ops.set_squeeze_op,
        "transpose_axes": axes_ops.set_transpose_op,
        "expand_axes": axes_ops.set_expand_op,
    }
    return _apply_sequence_axes_ops(input_, default=default, **ops)
|
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
import math
|
|
2
|
+
from typing import TypeAlias, assert_never
|
|
3
|
+
from warnings import warn
|
|
4
|
+
|
|
5
|
+
import dask.array as da
|
|
6
|
+
import numpy as np
|
|
7
|
+
import zarr
|
|
8
|
+
from pydantic import BaseModel, ConfigDict
|
|
9
|
+
|
|
10
|
+
from ngio.utils import NgioValueError
|
|
11
|
+
|
|
12
|
+
# A per-axis selection: a slice, a single int index, or a tuple of int
# indices (the tuple form allows non-contiguous selection, e.g. channels).
SlicingType: TypeAlias = slice | tuple[int, ...] | int
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _int_boundary_check(value: int, shape: int) -> int:
|
|
16
|
+
"""Ensure that the integer value is within the boundaries of the array shape."""
|
|
17
|
+
if value < 0 or value >= shape:
|
|
18
|
+
raise NgioValueError(
|
|
19
|
+
f"Invalid index {value}. Index is out of bounds for axis with size {shape}."
|
|
20
|
+
)
|
|
21
|
+
return value
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _slicing_tuple_boundary_check(
|
|
25
|
+
slicing_tuple: tuple[SlicingType, ...],
|
|
26
|
+
array_shape: tuple[int, ...],
|
|
27
|
+
) -> tuple[SlicingType, ...]:
|
|
28
|
+
"""Ensure that the slicing tuple is within the boundaries of the array shape.
|
|
29
|
+
|
|
30
|
+
This function normalizes the slicing tuple to ensure that the selection
|
|
31
|
+
is within the boundaries of the array shape.
|
|
32
|
+
"""
|
|
33
|
+
if len(slicing_tuple) != len(array_shape):
|
|
34
|
+
raise NgioValueError(
|
|
35
|
+
f"Invalid slicing tuple {slicing_tuple}. "
|
|
36
|
+
f"Length {len(slicing_tuple)} does not match array shape {array_shape}."
|
|
37
|
+
)
|
|
38
|
+
out_slicing_tuple = []
|
|
39
|
+
for sl, sh in zip(slicing_tuple, array_shape, strict=True):
|
|
40
|
+
if isinstance(sl, slice):
|
|
41
|
+
start, stop, step = sl.start, sl.stop, sl.step
|
|
42
|
+
if start is not None:
|
|
43
|
+
start = math.floor(start)
|
|
44
|
+
start = max(0, min(start, sh))
|
|
45
|
+
if stop is not None:
|
|
46
|
+
stop = math.ceil(stop)
|
|
47
|
+
stop = max(0, min(stop, sh))
|
|
48
|
+
out_slicing_tuple.append(slice(start, stop, step))
|
|
49
|
+
elif isinstance(sl, int):
|
|
50
|
+
_int_boundary_check(sl, shape=sh)
|
|
51
|
+
out_slicing_tuple.append(sl)
|
|
52
|
+
elif isinstance(sl, tuple):
|
|
53
|
+
[_int_boundary_check(i, shape=sh) for i in sl]
|
|
54
|
+
out_slicing_tuple.append(sl)
|
|
55
|
+
else:
|
|
56
|
+
assert_never(sl)
|
|
57
|
+
|
|
58
|
+
return tuple(out_slicing_tuple)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class SlicingOps(BaseModel):
    """Class to hold slicing operations.

    Pairs the on-disk layout of an array (axis names and shape) with an
    optional per-axis selection used when reading or writing.
    """

    # Axis names in on-disk order, aligned with `on_disk_shape`.
    on_disk_axes: tuple[str, ...]
    # Full on-disk array shape.
    on_disk_shape: tuple[int, ...]
    # Per-axis selection aligned with `on_disk_axes`; None means "take all".
    slicing_tuple: tuple[SlicingType, ...] | None = None
    model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=True)

    @property
    def normalized_slicing_tuple(self) -> None | tuple[SlicingType, ...]:
        """Normalize the slicing tuple to be within the array shape boundaries."""
        if self.slicing_tuple is not None:
            return _slicing_tuple_boundary_check(
                slicing_tuple=self.slicing_tuple,
                array_shape=self.on_disk_shape,
            )
        return None

    def get(self, ax_name: str, normalize: bool = False) -> SlicingType:
        """Get the slicing tuple.

        Returns the selection for `ax_name`; falls back to `slice(None)`
        when no slicing is set or `ax_name` is not an on-disk axis.
        When `normalize` is True the boundary-checked tuple is used.
        """
        slicing_tuple = (
            self.slicing_tuple if not normalize else self.normalized_slicing_tuple
        )
        if slicing_tuple is None:
            return slice(None)
        if ax_name not in self.on_disk_axes:
            return slice(None)
        ax_index = self.on_disk_axes.index(ax_name)
        return slicing_tuple[ax_index]
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def _check_tuple_in_slicing_tuple(
|
|
93
|
+
slicing_tuple: tuple[SlicingType, ...],
|
|
94
|
+
) -> tuple[None, None] | tuple[int, tuple[int, ...]]:
|
|
95
|
+
"""Check if there are any tuple in the slicing tuple.
|
|
96
|
+
|
|
97
|
+
The zarr python api only supports int or slices, not tuples.
|
|
98
|
+
Ngio support a single tuple in the slicing tuple to allow non-contiguous
|
|
99
|
+
selection (main use case: selecting multiple channels).
|
|
100
|
+
"""
|
|
101
|
+
# Find if the is any tuple in the slicing tuple
|
|
102
|
+
# If there is one we need to handle it differently
|
|
103
|
+
tuple_in_slice = [
|
|
104
|
+
(i, s) for i, s in enumerate(slicing_tuple) if isinstance(s, tuple)
|
|
105
|
+
]
|
|
106
|
+
if not tuple_in_slice:
|
|
107
|
+
# No tuple in the slicing tuple
|
|
108
|
+
return None, None
|
|
109
|
+
|
|
110
|
+
if len(tuple_in_slice) > 1:
|
|
111
|
+
raise NotImplementedError(
|
|
112
|
+
"Slicing with multiple non-contiguous tuples/lists "
|
|
113
|
+
"is not supported yet in Ngio. Use directly the "
|
|
114
|
+
"zarr.Array api to get the correct array slice."
|
|
115
|
+
)
|
|
116
|
+
# Complex case, we have exactly one tuple in the slicing tuple
|
|
117
|
+
ax, first_tuple = tuple_in_slice[0]
|
|
118
|
+
if len(first_tuple) > 100:
|
|
119
|
+
warn(
|
|
120
|
+
"Performance warning: "
|
|
121
|
+
"Non-contiguous slicing with a tuple/list with more than 100 elements is "
|
|
122
|
+
"not natively supported by zarr. This is implemented by Ngio by performing "
|
|
123
|
+
"multiple reads and stacking the result.",
|
|
124
|
+
stacklevel=2,
|
|
125
|
+
)
|
|
126
|
+
return ax, first_tuple
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def get_slice_as_numpy(zarr_array: zarr.Array, slicing_ops: SlicingOps) -> np.ndarray:
    """Read the selection described by `slicing_ops` from zarr as numpy."""
    slicing_tuple = slicing_ops.normalized_slicing_tuple
    if slicing_tuple is None:
        # No selection at all: read the full array.
        return zarr_array[...]

    tuple_axis, indices = _check_tuple_in_slicing_tuple(slicing_tuple)
    if tuple_axis is None:
        # Only ints/slices: zarr handles this natively.
        return zarr_array[slicing_tuple]

    assert indices is not None
    # Exactly one non-contiguous tuple: read each index and stack the results.
    per_index = [
        zarr_array[(*slicing_tuple[:tuple_axis], idx, *slicing_tuple[tuple_axis + 1 :])]
        for idx in indices
    ]
    return np.stack(per_index, axis=tuple_axis)
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def get_slice_as_dask(zarr_array: zarr.Array, slicing_ops: SlicingOps) -> da.Array:
    """Lazily read the selection described by `slicing_ops` as a dask array."""
    lazy = da.from_zarr(zarr_array)
    slicing_tuple = slicing_ops.normalized_slicing_tuple
    if slicing_tuple is None:
        # No selection at all: the full array.
        return lazy[...]

    tuple_axis, indices = _check_tuple_in_slicing_tuple(slicing_tuple)
    if tuple_axis is None:
        # Only ints/slices: handled natively.
        return lazy[slicing_tuple]

    assert indices is not None
    # Exactly one non-contiguous tuple: slice per index and stack lazily.
    per_index = [
        lazy[(*slicing_tuple[:tuple_axis], idx, *slicing_tuple[tuple_axis + 1 :])]
        for idx in indices
    ]
    return da.stack(per_index, axis=tuple_axis)
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
def set_slice_as_numpy(
    zarr_array: zarr.Array,
    patch: np.ndarray,
    slicing_ops: SlicingOps,
) -> None:
    """Write `patch` into `zarr_array` at the selection in `slicing_ops`."""
    slice_tuple = slicing_ops.normalized_slicing_tuple
    if slice_tuple is None:
        # No selection at all: overwrite the full array.
        zarr_array[...] = patch
        return

    tuple_axis, indices = _check_tuple_in_slicing_tuple(slice_tuple)
    if tuple_axis is None:
        # Only ints/slices: zarr handles this natively.
        zarr_array[slice_tuple] = patch
        return

    # Exactly one non-contiguous tuple: write one index at a time.
    assert indices is not None
    for patch_pos, idx in enumerate(indices):
        target = (*slice_tuple[:tuple_axis], idx, *slice_tuple[tuple_axis + 1 :])
        zarr_array[target] = np.take(patch, indices=patch_pos, axis=tuple_axis)
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
def set_slice_as_dask(
    zarr_array: zarr.Array, patch: da.Array, slicing_ops: SlicingOps
) -> None:
    """Write a dask `patch` into `zarr_array` at the selection in `slicing_ops`."""
    slice_tuple = slicing_ops.normalized_slicing_tuple
    if slice_tuple is None:
        # Base case, no slicing, write the full array
        da.to_zarr(arr=patch, url=zarr_array)
        return
    ax, first_tuple = _check_tuple_in_slicing_tuple(slice_tuple)
    if ax is None:
        # Base case, no tuple in the slicing tuple
        # NOTE(review): `da.to_zarr` expects `region` entries to be slices;
        # if `slice_tuple` contains bare int indices this call may fail —
        # confirm that callers only pass slices on this path.
        da.to_zarr(arr=patch, url=zarr_array, region=slice_tuple)
        return

    # Complex case, we have exactly one tuple in the slicing tuple
    assert first_tuple is not None
    for i, idx in enumerate(first_tuple):
        # Keep the tuple axis as a width-1 slice (plus expand_dims on the
        # patch) so the write region stays a tuple of slices.
        _sub_slice = (*slice_tuple[:ax], slice(idx, idx + 1), *slice_tuple[ax + 1 :])
        sub_patch = da.take(patch, indices=i, axis=ax)
        sub_patch = da.expand_dims(sub_patch, axis=ax)
        da.to_zarr(arr=sub_patch, url=zarr_array, region=_sub_slice)
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
from collections.abc import Sequence
|
|
2
|
+
from typing import Protocol
|
|
3
|
+
|
|
4
|
+
import dask.array as da
|
|
5
|
+
import numpy as np
|
|
6
|
+
|
|
7
|
+
from ngio.io_pipes._ops_slices import SlicingOps
|
|
8
|
+
from ngio.ome_zarr_meta.ngio_specs._axes import AxesOps
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class TransformProtocol(Protocol):
    """Protocol for a generic transform.

    Structural (duck-typed) interface: the `get_*` hooks run after an
    array is loaded, the `set_*` hooks run before it is written back.
    Each hook receives the slicing and axes operations in effect.
    """

    def get_as_numpy_transform(
        self, array: np.ndarray, slicing_ops: SlicingOps, axes_ops: AxesOps
    ) -> np.ndarray:
        """A transformation to be applied after loading a numpy array."""
        ...

    def get_as_dask_transform(
        self, array: da.Array, slicing_ops: SlicingOps, axes_ops: AxesOps
    ) -> da.Array:
        """A transformation to be applied after loading a dask array."""
        ...

    def set_as_numpy_transform(
        self, array: np.ndarray, slicing_ops: SlicingOps, axes_ops: AxesOps
    ) -> np.ndarray:
        """A transformation to be applied before writing a numpy array."""
        ...

    def set_as_dask_transform(
        self, array: da.Array, slicing_ops: SlicingOps, axes_ops: AxesOps
    ) -> da.Array:
        """A transformation to be applied before writing a dask array."""
        ...
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def get_as_numpy_transform(
    array: np.ndarray,
    slicing_ops: SlicingOps,
    axes_ops: AxesOps,
    transforms: Sequence[TransformProtocol] | None = None,
) -> np.ndarray:
    """Chain each transform's numpy "get" hook over the array, in order."""
    if not transforms:
        return array
    result = array
    for transform in transforms:
        result = transform.get_as_numpy_transform(
            result, slicing_ops=slicing_ops, axes_ops=axes_ops
        )
    return result
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def get_as_dask_transform(
    array: da.Array,
    slicing_ops: SlicingOps,
    axes_ops: AxesOps,
    transforms: Sequence[TransformProtocol] | None = None,
) -> da.Array:
    """Chain each transform's dask "get" hook over the array, in order."""
    if not transforms:
        return array
    result = array
    for transform in transforms:
        result = transform.get_as_dask_transform(
            result, slicing_ops=slicing_ops, axes_ops=axes_ops
        )
    return result
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def set_as_numpy_transform(
    array: np.ndarray,
    slicing_ops: SlicingOps,
    axes_ops: AxesOps,
    transforms: Sequence[TransformProtocol] | None = None,
) -> np.ndarray:
    """Chain each transform's numpy "set" (inverse) hook over the array."""
    if not transforms:
        return array
    result = array
    for transform in transforms:
        result = transform.set_as_numpy_transform(
            result, slicing_ops=slicing_ops, axes_ops=axes_ops
        )
    return result
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def set_as_dask_transform(
    array: da.Array,
    slicing_ops: SlicingOps,
    axes_ops: AxesOps,
    transforms: Sequence[TransformProtocol] | None = None,
) -> da.Array:
    """Chain each transform's dask "set" (inverse) hook over the array."""
    if not transforms:
        return array
    result = array
    for transform in transforms:
        result = transform.set_as_dask_transform(
            result, slicing_ops=slicing_ops, axes_ops=axes_ops
        )
    return result
|
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
import math
|
|
2
|
+
from collections.abc import Sequence
|
|
3
|
+
|
|
4
|
+
import dask.array as da
|
|
5
|
+
import numpy as np
|
|
6
|
+
|
|
7
|
+
from ngio.common._dimensions import Dimensions
|
|
8
|
+
from ngio.common._zoom import (
|
|
9
|
+
InterpolationOrder,
|
|
10
|
+
dask_zoom,
|
|
11
|
+
numpy_zoom,
|
|
12
|
+
)
|
|
13
|
+
from ngio.io_pipes._ops_slices import SlicingOps
|
|
14
|
+
from ngio.ome_zarr_meta import AxesOps
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class BaseZoomTransform:
    """Rescale arrays between an input and a target set of dimensions.

    The per-axis scale factor is the ratio of input to target pixel size;
    axes typed as "channel" are never rescaled. The `get_*` hooks zoom data
    read at the input level to the target level; the `set_*` hooks apply
    the inverse zoom before writing back.
    """

    def __init__(
        self,
        input_dimensions: Dimensions,
        target_dimensions: Dimensions,
        order: InterpolationOrder = "nearest",
    ) -> None:
        # Dimensions describing the level the data is read from.
        self._input_dimensions = input_dimensions
        # Dimensions describing the level the data should be resampled to.
        self._target_dimensions = target_dimensions
        self._input_pixel_size = input_dimensions.pixel_size
        self._target_pixel_size = target_dimensions.pixel_size
        # Interpolation order forwarded to numpy_zoom / dask_zoom.
        self._order: InterpolationOrder = order

    def _normalize_shape(
        self, slice_: slice | int | tuple, scale: float, max_dim: int
    ) -> int:
        """Return the scaled length of one axis given its selection.

        Bounds are rounded outward (floor on start, ceil on stop) and the
        stop is clamped to `max_dim`; an int selection always yields 1,
        a tuple selection yields its scaled length.
        """
        if isinstance(slice_, slice):
            # `start or 0` treats both None and 0 as "from the beginning".
            _start = slice_.start or 0
            _start_int = math.floor(_start * scale)
            if slice_.stop is not None:
                _stop = slice_.stop * scale
                _stop = min(_stop, max_dim)
            else:
                _stop = max_dim
            _stop_int = math.ceil(_stop)
            target_shape = _stop_int - _start_int

        elif isinstance(slice_, int):
            target_shape = 1
        elif isinstance(slice_, tuple):
            target_shape = len(slice_) * scale
        else:
            raise ValueError(f"Unsupported slice type: {type(slice_)}")
        return math.ceil(target_shape)

    def _compute_zoom_shape(
        self,
        array_shape: Sequence[int],
        axes_ops: AxesOps,
        slicing_ops: SlicingOps,
    ) -> tuple[int, ...]:
        """Compute the shape the array takes at the target pixel size."""
        assert len(array_shape) == len(axes_ops.in_memory_axes)

        target_shape = []
        for shape, ax_name in zip(array_shape, axes_ops.in_memory_axes, strict=True):
            ax_type = self._input_dimensions.axes_handler.get_axis(ax_name)
            if ax_type is not None and ax_type.axis_type == "channel":
                # Do not scale channel axis
                target_shape.append(shape)
                continue
            t_dim = self._target_dimensions.get(ax_name, default=1)
            in_pix = self._input_pixel_size.get(ax_name, default=1.0)
            t_pix = self._target_pixel_size.get(ax_name, default=1.0)
            slice_ = slicing_ops.get(ax_name, normalize=False)
            # Scale factor from pixel-size ratio (input / target).
            scale = in_pix / t_pix
            _target_shape = self._normalize_shape(
                slice_=slice_, scale=scale, max_dim=t_dim
            )
            target_shape.append(_target_shape)
        return tuple(target_shape)

    def _compute_inverse_zoom_shape(
        self,
        array_shape: Sequence[int],
        axes_ops: AxesOps,
        slicing_ops: SlicingOps,
    ) -> tuple[int, ...]:
        """Compute the on-disk shape a target-level array maps back onto.

        Uses the normalized slice with a scale of 1 (the slice itself is
        expressed in input-level coordinates), then sanity-checks that the
        given array is within one pixel per axis of what the forward zoom
        would have produced.
        """
        assert len(array_shape) == len(axes_ops.in_memory_axes)

        target_shape = []
        for shape, ax_name in zip(array_shape, axes_ops.in_memory_axes, strict=True):
            ax_type = self._input_dimensions.axes_handler.get_axis(ax_name)
            if ax_type is not None and ax_type.axis_type == "channel":
                # Do not scale channel axis
                target_shape.append(shape)
                continue
            in_dim = self._input_dimensions.get(ax_name, default=1)
            slice_ = slicing_ops.get(ax_name=ax_name, normalize=True)
            target_shape.append(
                self._normalize_shape(slice_=slice_, scale=1, max_dim=in_dim)
            )

        # Since we are basing the rescaling on the slice, we need to ensure
        # that the input image we got is roughly the right size.
        # This is a safeguard against user errors.
        expected_shape = self._compute_zoom_shape(
            array_shape=target_shape, axes_ops=axes_ops, slicing_ops=slicing_ops
        )
        if any(
            abs(es - s) > 1 for es, s in zip(expected_shape, array_shape, strict=True)
        ):
            raise ValueError(
                f"Input array shape {array_shape} is not compatible with the expected "
                f"shape {expected_shape} based on the zoom transform.\n"
            )
        return tuple(target_shape)

    def _numpy_zoom(
        self, array: np.ndarray, target_shape: tuple[int, ...]
    ) -> np.ndarray:
        """Zoom a numpy array to `target_shape` (no-op if already there)."""
        if array.shape == target_shape:
            return array
        return numpy_zoom(
            source_array=array, target_shape=target_shape, order=self._order
        )

    def _dask_zoom(
        self,
        array: da.Array,
        array_shape: tuple[int, ...],
        target_shape: tuple[int, ...],
    ) -> da.Array:
        """Zoom a dask array to `target_shape` (no-op if already there)."""
        if array_shape == target_shape:
            return array
        return dask_zoom(
            source_array=array, target_shape=target_shape, order=self._order
        )

    def get_as_numpy_transform(
        self, array: np.ndarray, slicing_ops: SlicingOps, axes_ops: AxesOps
    ) -> np.ndarray:
        """Apply the scaling transformation to a numpy array."""
        target_shape = self._compute_zoom_shape(
            array_shape=array.shape, axes_ops=axes_ops, slicing_ops=slicing_ops
        )
        return self._numpy_zoom(array=array, target_shape=target_shape)

    def get_as_dask_transform(
        self, array: da.Array, slicing_ops: SlicingOps, axes_ops: AxesOps
    ) -> da.Array:
        """Apply the scaling transformation to a dask array."""
        # Dask shapes can contain non-int entries; coerce for comparison.
        array_shape = tuple(int(s) for s in array.shape)
        target_shape = self._compute_zoom_shape(
            array_shape=array_shape, axes_ops=axes_ops, slicing_ops=slicing_ops
        )
        return self._dask_zoom(
            array=array, array_shape=array_shape, target_shape=target_shape
        )

    def set_as_numpy_transform(
        self, array: np.ndarray, slicing_ops: SlicingOps, axes_ops: AxesOps
    ) -> np.ndarray:
        """Apply the inverse scaling transformation to a numpy array."""
        target_shape = self._compute_inverse_zoom_shape(
            array_shape=array.shape, axes_ops=axes_ops, slicing_ops=slicing_ops
        )
        return self._numpy_zoom(array=array, target_shape=target_shape)

    def set_as_dask_transform(
        self, array: da.Array, slicing_ops: SlicingOps, axes_ops: AxesOps
    ) -> da.Array:
        """Apply the inverse scaling transformation to a dask array."""
        array_shape = tuple(int(s) for s in array.shape)
        target_shape = self._compute_inverse_zoom_shape(
            array_shape=array_shape, axes_ops=axes_ops, slicing_ops=slicing_ops
        )
        return self._dask_zoom(
            array=array, array_shape=array_shape, target_shape=target_shape
        )
|
ngio/ome_zarr_meta/__init__.py
CHANGED
|
@@ -13,7 +13,8 @@ from ngio.ome_zarr_meta._meta_handlers import (
|
|
|
13
13
|
get_well_meta_handler,
|
|
14
14
|
)
|
|
15
15
|
from ngio.ome_zarr_meta.ngio_specs import (
|
|
16
|
-
|
|
16
|
+
AxesHandler,
|
|
17
|
+
AxesOps,
|
|
17
18
|
Dataset,
|
|
18
19
|
ImageInWellPath,
|
|
19
20
|
NgffVersions,
|
|
@@ -22,11 +23,13 @@ from ngio.ome_zarr_meta.ngio_specs import (
|
|
|
22
23
|
NgioPlateMeta,
|
|
23
24
|
NgioWellMeta,
|
|
24
25
|
PixelSize,
|
|
26
|
+
build_canonical_axes_handler,
|
|
25
27
|
path_in_well_validation,
|
|
26
28
|
)
|
|
27
29
|
|
|
28
30
|
__all__ = [
|
|
29
|
-
"
|
|
31
|
+
"AxesHandler",
|
|
32
|
+
"AxesOps",
|
|
30
33
|
"Dataset",
|
|
31
34
|
"ImageInWellPath",
|
|
32
35
|
"ImageMetaHandler",
|
|
@@ -40,6 +43,7 @@ __all__ = [
|
|
|
40
43
|
"NgioPlateMeta",
|
|
41
44
|
"NgioWellMeta",
|
|
42
45
|
"PixelSize",
|
|
46
|
+
"build_canonical_axes_handler",
|
|
43
47
|
"find_image_meta_handler",
|
|
44
48
|
"find_label_meta_handler",
|
|
45
49
|
"find_plate_meta_handler",
|