ngio 0.1.5__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87) hide show
  1. ngio/__init__.py +33 -5
  2. ngio/common/__init__.py +54 -0
  3. ngio/common/_array_pipe.py +265 -0
  4. ngio/common/_axes_transforms.py +64 -0
  5. ngio/common/_common_types.py +5 -0
  6. ngio/common/_dimensions.py +120 -0
  7. ngio/common/_masking_roi.py +158 -0
  8. ngio/common/_pyramid.py +228 -0
  9. ngio/common/_roi.py +165 -0
  10. ngio/common/_slicer.py +96 -0
  11. ngio/{pipes/_zoom_utils.py → common/_zoom.py} +51 -83
  12. ngio/hcs/__init__.py +5 -0
  13. ngio/hcs/plate.py +448 -0
  14. ngio/images/__init__.py +23 -0
  15. ngio/images/abstract_image.py +349 -0
  16. ngio/images/create.py +270 -0
  17. ngio/images/image.py +453 -0
  18. ngio/images/label.py +285 -0
  19. ngio/images/masked_image.py +273 -0
  20. ngio/images/ome_zarr_container.py +738 -0
  21. ngio/ome_zarr_meta/__init__.py +47 -0
  22. ngio/ome_zarr_meta/_meta_handlers.py +791 -0
  23. ngio/ome_zarr_meta/ngio_specs/__init__.py +71 -0
  24. ngio/ome_zarr_meta/ngio_specs/_axes.py +481 -0
  25. ngio/ome_zarr_meta/ngio_specs/_channels.py +389 -0
  26. ngio/ome_zarr_meta/ngio_specs/_dataset.py +134 -0
  27. ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py +377 -0
  28. ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +489 -0
  29. ngio/ome_zarr_meta/ngio_specs/_pixel_size.py +116 -0
  30. ngio/ome_zarr_meta/v04/__init__.py +23 -0
  31. ngio/ome_zarr_meta/v04/_v04_spec_utils.py +485 -0
  32. ngio/tables/__init__.py +24 -6
  33. ngio/tables/_validators.py +190 -0
  34. ngio/tables/backends/__init__.py +8 -0
  35. ngio/tables/backends/_abstract_backend.py +71 -0
  36. ngio/tables/backends/_anndata_utils.py +198 -0
  37. ngio/tables/backends/_anndata_v1.py +76 -0
  38. ngio/tables/backends/_json_v1.py +56 -0
  39. ngio/tables/backends/_table_backends.py +102 -0
  40. ngio/tables/tables_container.py +310 -0
  41. ngio/tables/v1/__init__.py +5 -5
  42. ngio/tables/v1/_feature_table.py +182 -0
  43. ngio/tables/v1/_generic_table.py +160 -179
  44. ngio/tables/v1/_roi_table.py +366 -0
  45. ngio/utils/__init__.py +26 -10
  46. ngio/utils/_datasets.py +53 -0
  47. ngio/utils/_errors.py +10 -4
  48. ngio/utils/_fractal_fsspec_store.py +13 -0
  49. ngio/utils/_logger.py +3 -1
  50. ngio/utils/_zarr_utils.py +401 -0
  51. {ngio-0.1.5.dist-info → ngio-0.2.0.dist-info}/METADATA +31 -43
  52. ngio-0.2.0.dist-info/RECORD +54 -0
  53. ngio/core/__init__.py +0 -7
  54. ngio/core/dimensions.py +0 -122
  55. ngio/core/image_handler.py +0 -228
  56. ngio/core/image_like_handler.py +0 -549
  57. ngio/core/label_handler.py +0 -410
  58. ngio/core/ngff_image.py +0 -387
  59. ngio/core/roi.py +0 -92
  60. ngio/core/utils.py +0 -287
  61. ngio/io/__init__.py +0 -19
  62. ngio/io/_zarr.py +0 -88
  63. ngio/io/_zarr_array_utils.py +0 -0
  64. ngio/io/_zarr_group_utils.py +0 -61
  65. ngio/iterators/__init__.py +0 -1
  66. ngio/ngff_meta/__init__.py +0 -27
  67. ngio/ngff_meta/fractal_image_meta.py +0 -1267
  68. ngio/ngff_meta/meta_handler.py +0 -92
  69. ngio/ngff_meta/utils.py +0 -235
  70. ngio/ngff_meta/v04/__init__.py +0 -6
  71. ngio/ngff_meta/v04/specs.py +0 -158
  72. ngio/ngff_meta/v04/zarr_utils.py +0 -376
  73. ngio/pipes/__init__.py +0 -7
  74. ngio/pipes/_slicer_transforms.py +0 -176
  75. ngio/pipes/_transforms.py +0 -33
  76. ngio/pipes/data_pipe.py +0 -52
  77. ngio/tables/_ad_reader.py +0 -80
  78. ngio/tables/_utils.py +0 -301
  79. ngio/tables/tables_group.py +0 -252
  80. ngio/tables/v1/feature_tables.py +0 -182
  81. ngio/tables/v1/masking_roi_tables.py +0 -243
  82. ngio/tables/v1/roi_tables.py +0 -285
  83. ngio/utils/_common_types.py +0 -5
  84. ngio/utils/_pydantic_utils.py +0 -52
  85. ngio-0.1.5.dist-info/RECORD +0 -44
  86. {ngio-0.1.5.dist-info → ngio-0.2.0.dist-info}/WHEEL +0 -0
  87. {ngio-0.1.5.dist-info → ngio-0.2.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,158 @@
1
+ """Utilities to build masking regions of interest (ROIs)."""
2
+
3
+ import itertools
4
+
5
+ import dask
6
+ import dask.array as da
7
+ import dask.delayed
8
+ import numpy as np
9
+ import scipy.ndimage as ndi
10
+
11
+ from ngio.common._roi import Roi, RoiPixels
12
+ from ngio.ome_zarr_meta import PixelSize
13
+ from ngio.utils import NgioValueError
14
+
15
+
16
+ def _compute_offsets(chunks):
17
+ """Given a chunks tuple, compute cumulative offsets for each axis.
18
+
19
+ Returns a list where each element is a list of offsets for that dimension.
20
+ """
21
+ offsets = []
22
+ for dim_chunks in chunks:
23
+ dim_offsets = [0]
24
+ for size in dim_chunks:
25
+ dim_offsets.append(dim_offsets[-1] + size)
26
+ offsets.append(dim_offsets)
27
+ return offsets
28
+
29
+
30
+ def _adjust_slices(slices, offset):
31
+ """Adjust slices to global coordinates using the provided offset."""
32
+ adjusted_slices = {}
33
+ for label, s in slices.items():
34
+ adjusted = tuple(
35
+ slice(s_dim.start + off, s_dim.stop + off)
36
+ for s_dim, off in zip(s, offset, strict=True)
37
+ )
38
+ adjusted_slices[label] = adjusted
39
+ return adjusted_slices
40
+
41
+
42
@dask.delayed
def _process_chunk(chunk, offset):
    """Find per-label bounding slices of one chunk, in global coordinates.

    Runs ``compute_slices`` (ndi.find_objects) on the chunk, then shifts the
    chunk-local slices by the chunk's global ``offset``.
    """
    return _adjust_slices(compute_slices(chunk), offset)
52
+
53
+
54
+ def _merge_slices(
55
+ slice1: tuple[slice, ...], slice2: tuple[slice, ...]
56
+ ) -> tuple[slice, ...]:
57
+ """Merge two slices."""
58
+ merged = []
59
+ for s1, s2 in zip(slice1, slice2, strict=True):
60
+ start = min(s1.start, s2.start)
61
+ stop = max(s1.stop, s2.stop)
62
+ merged.append(slice(start, stop))
63
+ return tuple(merged)
64
+
65
+
66
@dask.delayed
def _collect_slices(
    local_slices: list[dict[int, tuple[slice, ...]]],
) -> dict[int, tuple[slice]]:
    """Reduce per-chunk slice dicts into one global dict of bounding slices.

    A label found in several chunks gets the union (bounding box) of its
    per-chunk slices via ``_merge_slices``.
    """
    global_slices: dict[int, tuple[slice, ...]] = {}
    for chunk_result in local_slices:
        for label, label_slices in chunk_result.items():
            previous = global_slices.get(label)
            if previous is None:
                global_slices[label] = label_slices
            else:
                global_slices[label] = _merge_slices(previous, label_slices)
    return global_slices
79
+
80
+
81
def compute_slices(segmentation: np.ndarray) -> dict[int, tuple[slice, ...]]:
    """Compute the bounding slices for each label in a segmentation.

    Args:
        segmentation (ndarray): The segmentation array.

    Returns:
        dict[int, tuple[slice]]: A dictionary with the label as key
            and the slice as value.
    """
    # ndi.find_objects returns one entry per label id (1..max); ids that are
    # absent from the image yield None and are dropped here.
    found = ndi.find_objects(segmentation)
    return {
        label: label_slices
        for label, label_slices in enumerate(found, start=1)
        if label_slices is not None
    }
99
+
100
+
101
def lazy_compute_slices(segmentation: da.Array) -> dict[int, tuple[slice, ...]]:
    """Compute slices for each label in a segmentation."""
    offsets = _compute_offsets(segmentation.chunks)
    chunk_grid = tuple(len(axis_chunks) for axis_chunks in segmentation.chunks)

    # Pair every delayed chunk with its position in the chunk grid, process
    # each chunk lazily, then merge all partial results in a single reduction.
    results = []
    flat_chunks = np.ravel(segmentation.to_delayed())
    grid_positions = list(itertools.product(*(range(n) for n in chunk_grid)))
    for position, chunk in zip(grid_positions, flat_chunks, strict=True):
        chunk_offset = tuple(
            offsets[axis][index] for axis, index in enumerate(position)
        )
        results.append(_process_chunk(chunk, chunk_offset))

    return _collect_slices(results).compute()
116
+
117
+
118
def compute_masking_roi(
    segmentation: np.ndarray | da.Array, pixel_size: PixelSize
) -> list[Roi]:
    """Compute a ROI for each label in a segmentation.

    This function expects a 2D or 3D segmentation array,
    with axes ordered as 'zyx' or 'yx'.
    Other axes orders are not supported.

    Args:
        segmentation: The (numpy or dask) label image.
        pixel_size: Pixel size used to convert the pixel ROIs to world units.

    Returns:
        list[Roi]: One world-coordinate ROI per label.

    Raises:
        NgioValueError: If the segmentation is not 2D or 3D.
    """
    if segmentation.ndim not in [2, 3]:
        raise NgioValueError("Only 2D and 3D segmentations are supported.")

    if isinstance(segmentation, da.Array):
        slices = lazy_compute_slices(segmentation)
    else:
        slices = compute_slices(segmentation)

    rois = []
    for label, slice_ in slices.items():
        if len(slice_) == 2:
            # 2D case: treat the data as a single z-plane.
            min_z, min_y, min_x = 0, slice_[0].start, slice_[1].start
            max_z, max_y, max_x = 1, slice_[0].stop, slice_[1].stop
        elif len(slice_) == 3:
            min_z, min_y, min_x = slice_[0].start, slice_[1].start, slice_[2].start
            max_z, max_y, max_x = slice_[0].stop, slice_[1].stop, slice_[2].stop
        else:
            # Use the package-wide error type for consistency (was ValueError).
            raise NgioValueError("Invalid slice length.")
        roi = RoiPixels(
            name=str(label),
            x_length=max_x - min_x,
            y_length=max_y - min_y,
            z_length=max_z - min_z,
            x=min_x,
            y=min_y,
            z=min_z,
        )

        rois.append(roi.to_roi(pixel_size))
    return rois
@@ -0,0 +1,228 @@
1
+ import math
2
+ from collections.abc import Collection
3
+ from typing import Literal
4
+
5
+ import dask.array as da
6
+ import numpy as np
7
+ import zarr
8
+
9
+ from ngio.common._zoom import _zoom_inputs_check, dask_zoom, numpy_zoom
10
+ from ngio.utils import (
11
+ AccessModeLiteral,
12
+ NgioValueError,
13
+ StoreOrGroup,
14
+ open_group_wrapper,
15
+ )
16
+
17
+
18
def _on_disk_numpy_zoom(
    source: zarr.Array,
    target: zarr.Array,
    order: Literal[0, 1, 2] = 1,
) -> None:
    """Zoom ``source`` into ``target`` eagerly, via a single in-memory call."""
    zoomed = numpy_zoom(source[...], target_shape=target.shape, order=order)
    target[...] = zoomed
24
+
25
+
26
def _on_disk_dask_zoom(
    source: zarr.Array,
    target: zarr.Array,
    order: Literal[0, 1, 2] = 1,
) -> None:
    """Zoom ``source`` into ``target`` lazily, chunk by chunk, with dask."""
    zoomed = dask_zoom(da.from_zarr(source), target_shape=target.shape, order=order)

    # Align the dask chunks with the target's on-disk chunking before writing.
    zoomed = zoomed.rechunk(target.chunks)
    zoomed.compute_chunk_sizes()
    zoomed.to_zarr(target)
37
+
38
+
39
def _on_disk_coarsen(
    source: zarr.Array,
    target: zarr.Array,
    _order: Literal[0, 1] = 1,
    aggregation_function: np.ufunc | None = None,
) -> None:
    """Apply a coarsening operation from a source zarr array to a target zarr array.

    Args:
        source (zarr.Array): The source array to coarsen.
        target (zarr.Array): The target array to save the coarsened result to.
        _order (Literal[0, 1]): The order of interpolation is not really implemented
            for coarsening, but it is kept for compatibility with the zoom function.
            _order=1 -> linear interpolation ~ np.mean
            _order=0 -> nearest interpolation ~ np.max
        aggregation_function (np.ufunc): The aggregation function to use.

    Raises:
        NgioValueError: If the computed shape does not match the target, if no
            aggregation function fits ``_order``, or if a scaling factor is
            not an integer coarsening factor.
    """
    source_array = da.from_zarr(source)

    _scale, _target_shape = _zoom_inputs_check(
        source_array=source_array, scale=None, target_shape=target.shape
    )

    # Raise explicitly instead of `assert`: asserts are stripped when Python
    # runs with -O, which would silently skip this validation.
    if _target_shape != target.shape:
        raise NgioValueError("Target shape must match the target array shape")

    if aggregation_function is None:
        if _order == 1:
            aggregation_function = np.mean
        elif _order == 0:
            aggregation_function = np.max
        else:
            raise NgioValueError(
                f"Aggregation function must be provided for order {_order}"
            )

    coarsening_setup = {}
    for i, s in enumerate(_scale):
        factor = 1 / s
        # This check is very strict, but it is necessary to avoid
        # a few pixels shift in the coarsening
        # We could add a tolerance
        if factor.is_integer():
            coarsening_setup[i] = int(factor)
        else:
            raise NgioValueError(
                f"Coarsening factor must be an integer, got {factor} on axis {i}"
            )

    out_target = da.coarsen(
        aggregation_function, source_array, coarsening_setup, trim_excess=True
    )
    out_target = out_target.rechunk(target.chunks)
    out_target.to_zarr(target)
94
+
95
+
96
+ def on_disk_zoom(
97
+ source: zarr.Array,
98
+ target: zarr.Array,
99
+ order: Literal[0, 1, 2] = 1,
100
+ mode: Literal["dask", "numpy", "coarsen"] = "dask",
101
+ ) -> None:
102
+ """Apply a zoom operation from a source zarr array to a target zarr array.
103
+
104
+ Args:
105
+ source (zarr.Array): The source array to zoom.
106
+ target (zarr.Array): The target array to save the zoomed result to.
107
+ order (Literal[0, 1, 2]): The order of interpolation. Defaults to 1.
108
+ mode (Literal["dask", "numpy", "coarsen"]): The mode to use. Defaults to "dask".
109
+ """
110
+ if not isinstance(source, zarr.Array):
111
+ raise NgioValueError("source must be a zarr array")
112
+
113
+ if not isinstance(target, zarr.Array):
114
+ raise NgioValueError("target must be a zarr array")
115
+
116
+ if source.dtype != target.dtype:
117
+ raise NgioValueError("source and target must have the same dtype")
118
+
119
+ match mode:
120
+ case "numpy":
121
+ return _on_disk_numpy_zoom(source, target, order)
122
+ case "dask":
123
+ return _on_disk_dask_zoom(source, target, order)
124
+ case "coarsen":
125
+ return _on_disk_coarsen(
126
+ source,
127
+ target,
128
+ )
129
+ case _:
130
+ raise NgioValueError("mode must be either 'dask', 'numpy' or 'coarsen'")
131
+
132
+
133
def _find_closest_arrays(
    processed: list[zarr.Array], to_be_processed: list[zarr.Array]
) -> tuple[int, int]:
    """Find the pair of arrays (one per list) with the most similar shapes.

    Returns:
        tuple[int, int]: (index into ``processed``, index into
            ``to_be_processed``) of the pair whose shapes are closest in
            euclidean distance.
    """
    distances = np.zeros((len(processed), len(to_be_processed)))
    for proc_idx, proc_arr in enumerate(processed):
        for cand_idx, cand_arr in enumerate(to_be_processed):
            squared_diffs = [
                (s1 - s2) ** 2
                for s1, s2 in zip(cand_arr.shape, proc_arr.shape, strict=False)
            ]
            distances[proc_idx, cand_idx] = np.sqrt(np.sum(squared_diffs))

    return np.unravel_index(distances.argmin(), distances.shape)
151
+
152
+
153
def consolidate_pyramid(
    source: zarr.Array,
    targets: list[zarr.Array],
    order: Literal[0, 1, 2] = 1,
    mode: Literal["dask", "numpy", "coarsen"] = "dask",
) -> None:
    """Consolidate the Zarr pyramid by propagating ``source`` to all targets.

    Each target is computed from the already-processed array whose shape is
    closest, so every level is derived from the nearest available resolution.

    Args:
        source: The reference array (already up to date).
        targets: The pyramid levels to (re)compute.
        order: Interpolation order passed to the zoom. Defaults to 1.
        mode: Zoom implementation to use. Defaults to "dask".
    """
    processed = [source]
    # Work on a copy: popping below must not empty the caller's `targets`
    # list (the previous implementation mutated it in place).
    to_be_processed = list(targets)

    while to_be_processed:
        source_id, target_id = _find_closest_arrays(processed, to_be_processed)

        source_image = processed[source_id]
        target_image = to_be_processed.pop(target_id)

        on_disk_zoom(
            source=source_image,
            target=target_image,
            mode=mode,
            order=order,
        )
        processed.append(target_image)
176
+
177
+
178
def init_empty_pyramid(
    store: StoreOrGroup,
    paths: list[str],
    ref_shape: Collection[int],
    scaling_factors: Collection[float],
    chunks: Collection[int] | None = None,
    dtype: str = "uint16",
    mode: AccessModeLiteral = "a",
) -> None:
    """Create an empty OME-Zarr pyramid: one zeroed array per level in `paths`.

    Each level's shape is derived from the previous one by dividing by
    `scaling_factors`; chunks are clamped so they never exceed the level shape.

    Args:
        store: Store or group to create the arrays in.
        paths: Level names, ordered from highest to lowest resolution.
        ref_shape: Shape of the first (highest-resolution) level.
        scaling_factors: Per-axis downscaling factor between levels.
        chunks: Chunk shape; if None, taken from the first created array.
        dtype: Data type of the arrays. Defaults to "uint16".
        mode: Zarr access mode used to open the store. Defaults to "a".

    Raises:
        NgioValueError: If shapes/chunks/scaling factors are inconsistent or
            a level shape would drop below 1 on any axis.
    """
    if chunks is not None and len(chunks) != len(ref_shape):
        raise NgioValueError(
            "The shape and chunks must have the same number of dimensions."
        )

    if len(ref_shape) != len(scaling_factors):
        raise NgioValueError(
            "The shape and scaling factor must have the same number of dimensions."
        )

    root_group, _ = open_group_wrapper(store, mode=mode)
    for path in paths:
        if any(s < 1 for s in ref_shape):
            raise NgioValueError(
                "Level shape must be at least 1 on all dimensions. "
                f"Calculated shape: {ref_shape} at level {path}."
            )
        new_arr = root_group.zeros(
            name=path,
            shape=ref_shape,
            dtype=dtype,
            chunks=chunks,
            dimension_separator="/",
            overwrite=True,
        )

        # Todo redo this with when a proper build of pyramid is implemented
        # Compute the next level's shape: floor(s / sc) when that is even,
        # ceil otherwise — presumably to keep shapes friendly for repeated
        # halving; TODO confirm the intent of the even/odd rule.
        _shape = []
        for s, sc in zip(ref_shape, scaling_factors, strict=True):
            if math.floor(s / sc) % 2 == 0:
                _shape.append(math.floor(s / sc))
            else:
                _shape.append(math.ceil(s / sc))
        ref_shape = _shape

        # First iteration with chunks=None: inherit the auto-chosen chunking
        # of the first created array, then clamp to the next level's shape.
        if chunks is None:
            chunks = new_arr.chunks
        if chunks is None:
            raise NgioValueError("Something went wrong with the chunks")
        chunks = [min(c, s) for c, s in zip(chunks, ref_shape, strict=True)]
    return None
ngio/common/_roi.py ADDED
@@ -0,0 +1,165 @@
1
+ """Region of interest (ROI) metadata.
2
+
3
+ These are the interfaces between the ROI tables / masking ROI tables and
4
+ the ImageLikeHandler.
5
+ """
6
+
7
+ from collections.abc import Iterable
8
+
9
+ import numpy as np
10
+ from pydantic import BaseModel, ConfigDict, Field
11
+
12
+ from ngio.common._dimensions import Dimensions
13
+ from ngio.ome_zarr_meta.ngio_specs import PixelSize, SpaceUnits
14
+ from ngio.utils import NgioValueError
15
+
16
+
17
+ def _to_raster(value: float, pixel_size: float, max_shape: int) -> int:
18
+ """Convert to raster coordinates."""
19
+ round_value = int(np.round(value / pixel_size))
20
+ # Ensure the value is within the image shape boundaries
21
+ return max(0, min(round_value, max_shape))
22
+
23
+
24
+ def _to_world(value: int, pixel_size: float) -> float:
25
+ """Convert to world coordinates."""
26
+ return value * pixel_size
27
+
28
+
29
class Roi(BaseModel):
    """Region of interest (ROI) expressed in world coordinates."""

    name: str
    x_length: float
    y_length: float
    z_length: float = 1.0
    x: float = 0.0
    y: float = 0.0
    z: float = 0.0
    unit: SpaceUnits = Field(SpaceUnits.micrometer, repr=False)

    model_config = ConfigDict(extra="allow")

    def to_pixel_roi(
        self, pixel_size: PixelSize, dimensions: Dimensions
    ) -> "RoiPixels":
        """Convert to raster coordinates."""
        shape_x = dimensions.get("x")
        shape_y = dimensions.get("y")
        # Will default to 1 if z does not exist
        shape_z = dimensions.get("z", strict=False)

        return RoiPixels(
            name=self.name,
            x=_to_raster(self.x, pixel_size.x, shape_x),
            y=_to_raster(self.y, pixel_size.y, shape_y),
            z=_to_raster(self.z, pixel_size.z, shape_z),
            x_length=_to_raster(self.x_length, pixel_size.x, shape_x),
            y_length=_to_raster(self.y_length, pixel_size.y, shape_y),
            z_length=_to_raster(self.z_length, pixel_size.z, shape_z),
        )

    def zoom(self, zoom_factor: float = 1) -> "Roi":
        """Zoom the ROI by a factor.

        Args:
            zoom_factor: The zoom factor. If the zoom factor
                is less than 1 the ROI will be zoomed in.
                If the zoom factor is greater than 1 the ROI will be zoomed out.
                If the zoom factor is 1 the ROI will not be changed.
        """
        return zoom_roi(self, zoom_factor)
72
+
73
+
74
class RoiPixels(BaseModel):
    """Region of interest (ROI) expressed in raster (pixel) coordinates."""

    name: str
    x: int
    y: int
    z: int
    x_length: int
    y_length: int
    z_length: int
    model_config = ConfigDict(extra="allow")

    def to_roi(self, pixel_size: PixelSize) -> Roi:
        """Convert to world coordinates."""
        return Roi(
            name=self.name,
            x=_to_world(self.x, pixel_size.x),
            y=_to_world(self.y, pixel_size.y),
            z=_to_world(self.z, pixel_size.z),
            x_length=_to_world(self.x_length, pixel_size.x),
            y_length=_to_world(self.y_length, pixel_size.y),
            z_length=_to_world(self.z_length, pixel_size.z),
            unit=pixel_size.space_unit,
        )

    def to_slices(self) -> dict[str, slice]:
        """Return per-axis slices covering the ROI."""
        return {
            axis: slice(start, start + length)
            for axis, start, length in (
                ("x", self.x, self.x_length),
                ("y", self.y, self.y_length),
                ("z", self.z, self.z_length),
            )
        }
106
+
107
+
108
def zoom_roi(roi: Roi, zoom_factor: float = 1) -> Roi:
    """Zoom the ROI by a factor (in the xy-plane only).

    Args:
        roi: The ROI to zoom.
        zoom_factor: The zoom factor. If the zoom factor
            is less than 1 the ROI will be zoomed in.
            If the zoom factor is greater than 1 the ROI will be zoomed out.
            If the zoom factor is 1 the ROI will not be changed.

    Returns:
        Roi: The zoomed ROI.

    Raises:
        NgioValueError: If the zoom factor is not strictly positive.
    """
    if zoom_factor <= 0:
        # Use the package-wide error type for consistency (was ValueError).
        raise NgioValueError("Zoom factor must be greater than 0.")

    # the zoom factor needs to be rescaled
    # from the range [-1, inf) to [0, inf)
    zoom_factor -= 1
    diff_x = roi.x_length * zoom_factor
    diff_y = roi.y_length * zoom_factor

    # Grow/shrink symmetrically around the center, clamping the origin at 0.
    new_x = max(roi.x - diff_x / 2, 0)
    new_y = max(roi.y - diff_y / 2, 0)

    new_roi = Roi(
        name=roi.name,
        x=new_x,
        y=new_y,
        z=roi.z,
        x_length=roi.x_length + diff_x,
        y_length=roi.y_length + diff_y,
        z_length=roi.z_length,
        unit=roi.unit,
    )

    return new_roi
142
+
143
+
144
def roi_to_slice_kwargs(
    roi: Roi,
    pixel_size: PixelSize,
    dimensions: Dimensions,
    **slice_kwargs: slice | int | Iterable[int],
) -> dict[str, slice | int | Iterable[int]]:
    """Convert a WorldCooROI to slice_kwargs."""
    pixel_roi = roi.to_pixel_roi(pixel_size=pixel_size, dimensions=dimensions)
    raster_slices = pixel_roi.to_slices()

    # Drop the z-slice when the on-disk data has no z axis.
    if not dimensions.has_axis(axis_name="z"):
        raster_slices.pop("z")

    for key in slice_kwargs:
        if key in raster_slices:
            raise NgioValueError(
                f"Key {key} is already in the slice_kwargs. "
                "Ambiguous which one to use: "
                f"{key}={slice_kwargs[key]} or roi_{key}={raster_slices[key]}"
            )
    return {**raster_slices, **slice_kwargs}
ngio/common/_slicer.py ADDED
@@ -0,0 +1,96 @@
1
+ from collections.abc import Iterable
2
+
3
+ import dask.array as da
4
+ import numpy as np
5
+ import zarr
6
+
7
+ from ngio.common._dimensions import Dimensions
8
+ from ngio.ome_zarr_meta.ngio_specs import AxesTransformation
9
+ from ngio.utils import NgioValueError
10
+
11
+
12
+ def _validate_int(value: int, shape: int) -> int:
13
+ if not isinstance(value, int):
14
+ raise NgioValueError(f"Invalid value {value} of type {type(value)}")
15
+ if value < 0 or value >= shape:
16
+ raise NgioValueError(
17
+ f"Invalid value {value}. Index out of bounds for axis of shape {shape}"
18
+ )
19
+ return value
20
+
21
+
22
def _validate_iter_of_ints(value: Iterable[int], shape: int) -> list[int]:
    """Validate a list of integer indices against an axis of size ``shape``."""
    # Only lists are accepted; other iterables (e.g. strings, generators)
    # are rejected to avoid ambiguous indexing.
    if not isinstance(value, list):
        raise NgioValueError(f"Invalid value {value} of type {type(value)}")
    return [_validate_int(item, shape=shape) for item in value]
27
+
28
+
29
+ def _validate_slice(value: slice, shape: int) -> slice:
30
+ start = value.start if value.start is not None else 0
31
+ start = max(start, 0)
32
+ stop = value.stop if value.stop is not None else shape
33
+ return slice(start, stop)
34
+
35
+
36
class SliceTransform(AxesTransformation):
    """Axes transformation that selects a region of the on-disk array."""

    # One entry per on-disk axis: a `slice`, or a tuple of ints for
    # point-list (fancy) indexing along that axis.
    slices: tuple[slice | tuple[int, ...], ...]
38
+
39
+
40
def compute_and_slices(
    *,
    dimensions: Dimensions,
    **slice_kwargs: slice | int | Iterable[int],
) -> SliceTransform:
    """Build a SliceTransform from per-axis slice/index keyword arguments.

    Args:
        dimensions: The on-disk dimensions, used to resolve axis names and
            validate each index against the axis' shape.
        **slice_kwargs: For each axis name: a `slice`, an `int` (converted
            to a one-element slice), or a list of ints (fancy indexing).

    Returns:
        SliceTransform: Slices in on-disk axis order; unconstrained axes
            get `slice(None)`.

    Raises:
        NgioValueError: If an axis name is not found on disk, an index is
            out of bounds, or a slice definition has an unsupported type.
    """
    _slices = {}
    axes_names = dimensions._axes_mapper.on_disk_axes_names
    for axis_name, slice_ in slice_kwargs.items():
        axis = dimensions._axes_mapper.get_axis(axis_name)
        if axis is None:
            raise NgioValueError(
                f"Invalid axis {axis_name}. "
                f"Not found on the on-disk axes {axes_names}. "
                "If you want to get/set a singleton value include "
                "it in the axes_order parameter."
            )

        shape = dimensions.get(axis.on_disk_name)

        if isinstance(slice_, int):
            # A bare integer selects a single plane but keeps the axis.
            slice_ = _validate_int(slice_, shape)
            slice_ = slice(slice_, slice_ + 1)

        elif isinstance(slice_, Iterable):
            slice_ = _validate_iter_of_ints(slice_, shape)
            slice_ = tuple(slice_)

        elif isinstance(slice_, slice):
            slice_ = _validate_slice(slice_, shape)

        else:
            # Was a redundant `elif not isinstance(slice_, slice)` after the
            # slice branch; a plain `else` makes the catch-all explicit.
            raise NgioValueError(
                f"Invalid slice definition {slice_} of type {type(slice_)}"
            )
        _slices[axis.on_disk_name] = slice_

    slices = tuple(_slices.get(axis, slice(None)) for axis in axes_names)
    return SliceTransform(slices=slices)
78
+
79
+
80
def numpy_get_slice(array: zarr.Array, slices: SliceTransform) -> np.ndarray:
    """Read the region described by ``slices`` eagerly, as a numpy array."""
    region = slices.slices
    return array[region]
82
+
83
+
84
def dask_get_slice(array: zarr.Array, slices: SliceTransform) -> da.Array:
    """Read the region described by ``slices`` lazily, as a dask array."""
    lazy_array = da.from_zarr(array)
    return lazy_array[slices.slices]
87
+
88
+
89
def numpy_set_slice(
    array: zarr.Array, patch: np.ndarray, slices: SliceTransform
) -> None:
    """Write ``patch`` into the region of ``array`` described by ``slices``."""
    region = slices.slices
    array[region] = patch
93
+
94
+
95
def dask_set_slice(array: zarr.Array, patch: da.Array, slices: SliceTransform) -> None:
    """Write a lazy dask ``patch`` into the region of ``array`` given by ``slices``."""
    da.to_zarr(arr=patch, url=array, region=slices.slices)