ngio 0.1.6__py3-none-any.whl → 0.2.0a1__py3-none-any.whl

This diff compares publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
Files changed (84)
  1. ngio/__init__.py +31 -5
  2. ngio/common/__init__.py +44 -0
  3. ngio/common/_array_pipe.py +160 -0
  4. ngio/common/_axes_transforms.py +63 -0
  5. ngio/common/_common_types.py +5 -0
  6. ngio/common/_dimensions.py +113 -0
  7. ngio/common/_pyramid.py +222 -0
  8. ngio/{core/roi.py → common/_roi.py} +22 -23
  9. ngio/common/_slicer.py +97 -0
  10. ngio/{pipes/_zoom_utils.py → common/_zoom.py} +2 -78
  11. ngio/hcs/__init__.py +60 -0
  12. ngio/images/__init__.py +23 -0
  13. ngio/images/abstract_image.py +240 -0
  14. ngio/images/create.py +251 -0
  15. ngio/images/image.py +383 -0
  16. ngio/images/label.py +96 -0
  17. ngio/images/omezarr_container.py +512 -0
  18. ngio/ome_zarr_meta/__init__.py +35 -0
  19. ngio/ome_zarr_meta/_generic_handlers.py +320 -0
  20. ngio/ome_zarr_meta/_meta_handlers.py +142 -0
  21. ngio/ome_zarr_meta/ngio_specs/__init__.py +63 -0
  22. ngio/ome_zarr_meta/ngio_specs/_axes.py +481 -0
  23. ngio/ome_zarr_meta/ngio_specs/_channels.py +378 -0
  24. ngio/ome_zarr_meta/ngio_specs/_dataset.py +134 -0
  25. ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py +5 -0
  26. ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +434 -0
  27. ngio/ome_zarr_meta/ngio_specs/_pixel_size.py +84 -0
  28. ngio/ome_zarr_meta/v04/__init__.py +11 -0
  29. ngio/ome_zarr_meta/v04/_meta_handlers.py +54 -0
  30. ngio/ome_zarr_meta/v04/_v04_spec_utils.py +412 -0
  31. ngio/tables/__init__.py +21 -5
  32. ngio/tables/_validators.py +192 -0
  33. ngio/tables/backends/__init__.py +8 -0
  34. ngio/tables/backends/_abstract_backend.py +71 -0
  35. ngio/tables/backends/_anndata_utils.py +194 -0
  36. ngio/tables/backends/_anndata_v1.py +75 -0
  37. ngio/tables/backends/_json_v1.py +56 -0
  38. ngio/tables/backends/_table_backends.py +102 -0
  39. ngio/tables/tables_container.py +300 -0
  40. ngio/tables/v1/__init__.py +6 -5
  41. ngio/tables/v1/_feature_table.py +161 -0
  42. ngio/tables/v1/_generic_table.py +99 -182
  43. ngio/tables/v1/_masking_roi_table.py +175 -0
  44. ngio/tables/v1/_roi_table.py +226 -0
  45. ngio/utils/__init__.py +23 -10
  46. ngio/utils/_datasets.py +51 -0
  47. ngio/utils/_errors.py +10 -4
  48. ngio/utils/_zarr_utils.py +378 -0
  49. {ngio-0.1.6.dist-info → ngio-0.2.0a1.dist-info}/METADATA +18 -39
  50. ngio-0.2.0a1.dist-info/RECORD +53 -0
  51. ngio/core/__init__.py +0 -7
  52. ngio/core/dimensions.py +0 -122
  53. ngio/core/image_handler.py +0 -228
  54. ngio/core/image_like_handler.py +0 -549
  55. ngio/core/label_handler.py +0 -410
  56. ngio/core/ngff_image.py +0 -387
  57. ngio/core/utils.py +0 -287
  58. ngio/io/__init__.py +0 -19
  59. ngio/io/_zarr.py +0 -88
  60. ngio/io/_zarr_array_utils.py +0 -0
  61. ngio/io/_zarr_group_utils.py +0 -60
  62. ngio/iterators/__init__.py +0 -1
  63. ngio/ngff_meta/__init__.py +0 -27
  64. ngio/ngff_meta/fractal_image_meta.py +0 -1267
  65. ngio/ngff_meta/meta_handler.py +0 -92
  66. ngio/ngff_meta/utils.py +0 -235
  67. ngio/ngff_meta/v04/__init__.py +0 -6
  68. ngio/ngff_meta/v04/specs.py +0 -158
  69. ngio/ngff_meta/v04/zarr_utils.py +0 -376
  70. ngio/pipes/__init__.py +0 -7
  71. ngio/pipes/_slicer_transforms.py +0 -176
  72. ngio/pipes/_transforms.py +0 -33
  73. ngio/pipes/data_pipe.py +0 -52
  74. ngio/tables/_ad_reader.py +0 -80
  75. ngio/tables/_utils.py +0 -301
  76. ngio/tables/tables_group.py +0 -252
  77. ngio/tables/v1/feature_tables.py +0 -182
  78. ngio/tables/v1/masking_roi_tables.py +0 -243
  79. ngio/tables/v1/roi_tables.py +0 -285
  80. ngio/utils/_common_types.py +0 -5
  81. ngio/utils/_pydantic_utils.py +0 -52
  82. ngio-0.1.6.dist-info/RECORD +0 -44
  83. {ngio-0.1.6.dist-info → ngio-0.2.0a1.dist-info}/WHEEL +0 -0
  84. {ngio-0.1.6.dist-info → ngio-0.2.0a1.dist-info}/licenses/LICENSE +0 -0
ngio/ngff_meta/v04/zarr_utils.py DELETED
@@ -1,376 +0,0 @@
- """Zarr utilities for loading metadata from OME-NGFF 0.4."""
-
- from typing import Literal
-
- from ngio.io import (
-     AccessModeLiteral,
-     Group,
-     StoreOrGroup,
-     open_group_wrapper,
- )
- from ngio.io._zarr import _is_group_readonly
- from ngio.ngff_meta.fractal_image_meta import (
-     Axis,
-     Channel,
-     ChannelVisualisation,
-     Dataset,
-     ImageLabelMeta,
-     ImageMeta,
-     LabelMeta,
-     Omero,
- )
- from ngio.ngff_meta.v04.specs import (
-     Axis04,
-     Channel04,
-     Dataset04,
-     Multiscale04,
-     NgffImageMeta04,
-     Omero04,
-     ScaleCoordinateTransformation04,
-     Transformation04,
-     TranslationCoordinateTransformation04,
-     Window04,
- )
-
-
- def check_ngff_image_meta_v04(store: StoreOrGroup) -> bool:
-     """Check if a Zarr Group contains the OME-NGFF v0.4."""
-     store = open_group_wrapper(store=store, mode="r", zarr_format=2)
-     attrs = dict(store.attrs)
-     multiscales = attrs.get("multiscales", None)
-     if multiscales is None:
-         return False
-
-     version = multiscales[0].get("version", None)
-     if version != "0.4":
-         return False
-
-     return True
-
-
- def load_vanilla_ngff_image_meta_v04(group: Group) -> NgffImageMeta04:
-     """Load the OME-NGFF 0.4 image meta model."""
-     return NgffImageMeta04(**group.attrs)
-
-
- def _transform_dataset(
-     datasets04: list[Dataset04],
-     axes: list[Axis04],
-     coo_transformation04: list[Transformation04] | None,
- ) -> list[Dataset]:
-     # coo_transformation validation
-     # only one scale transformation is allowed as top-level
-     if coo_transformation04 is not None:
-         if len(coo_transformation04) != 1:
-             raise ValueError("Only one scale transformation is allowed as top-level.")
-
-         if not isinstance(coo_transformation04[0], ScaleCoordinateTransformation04):
-             raise ValueError(
-                 "Invalid coordinate transformations. \
-                 Only scale transformation is allowed."
-             )
-
-         top_scale = coo_transformation04[0].scale
-     else:
-         top_scale = None
-
-     fractal_datasets = []
-     for dataset04 in datasets04:
-         scale, translation = None, None
-         for transformation in dataset04.coordinateTransformations:
-             if isinstance(transformation, TranslationCoordinateTransformation04):
-                 translation = transformation.translation
-
-             if isinstance(transformation, ScaleCoordinateTransformation04):
-                 scale = transformation.scale
-                 if top_scale is not None:
-                     # Combine the scale transformation with the top-level scale
-                     if len(scale) != len(top_scale):
-                         raise ValueError(
-                             "Inconsistent scale transformation. \
-                             The scale transformation must have the same length."
-                         )
-                     # Combine the scale transformation with the top-level scale
-                     scale = [s * ts for s, ts in zip(scale, top_scale, strict=True)]
-                 scale = scale
-         fractal_datasets.append(
-             Dataset(
-                 path=dataset04.path,
-                 on_disk_axes=axes,
-                 on_disk_scale=scale,
-                 on_disk_translation=translation,
-             )
-         )
-     return fractal_datasets
-
-
- def vanilla_omero_v04_to_fractal(omero04: Omero04) -> Omero:
-     """Convert the Omero04 to Omero."""
-     list_channels = []
-     for channel04 in omero04.channels:
-         # Convert the window to a dictionary
-         label = channel04.label
-
-         if "wavelength_id" in channel04.extra_fields:
-             # If the wavelength_id is present, pop it from the extra fields
-             # so that it is not added to the channel_visualisation
-             wavelength_id = channel04.extra_fields.pop("wavelength_id")
-         else:
-             wavelength_id = label
-
-         if channel04.window is None:
-             window04 = Window04(
-                 start=0,
-                 end=65535,
-                 min=0,
-                 max=65535,
-             )
-         else:
-             window04 = channel04.window
-
-         ch_visualisation = ChannelVisualisation(
-             color=channel04.color,
-             active=channel04.active,
-             start=window04.start,
-             end=window04.end,
-             min=window04.min,
-             max=window04.max,
-             **channel04.extra_fields,
-         )
-
-         channel = Channel(
-             label=label,
-             wavelength_id=wavelength_id,
-             channel_visualisation=ch_visualisation,
-         )
-         list_channels.append(channel)
-
-     return Omero(
-         channels=list_channels,
-         **omero04.extra_fields,
-     )
-
-
- def fractal_omero_to_vanilla_v04(omero: Omero) -> Omero04:
-     """Convert the Omero to Omero04."""
-     list_channels04 = []
-     for channel in omero.channels:
-         # Convert the window to a Window04 object
-         window04 = Window04(
-             start=channel.channel_visualisation.start,
-             end=channel.channel_visualisation.end,
-             min=channel.channel_visualisation.min,
-             max=channel.channel_visualisation.max,
-         )
-         channel04 = Channel04(
-             label=channel.label,
-             color=channel.channel_visualisation.color,
-             active=channel.channel_visualisation.active,
-             window=window04,
-             wavelength_id=channel.wavelength_id,
-             **channel.channel_visualisation.extra_fields,
-         )
-         list_channels04.append(channel04)
-
-     return Omero04(
-         version="0.4",
-         channels=list_channels04,
-         **omero.extra_fields,
-     )
-
-
- def vanilla_ngff_image_meta_v04_to_fractal(
-     meta04: NgffImageMeta04,
-     meta_mode: Literal["image", "label"] = "image",
- ) -> ImageLabelMeta:
-     """Convert the NgffImageMeta04 to ImageMeta."""
-     if not isinstance(meta04, NgffImageMeta04):
-         raise ValueError("Invalid metadata type. Expected NgffImageMeta04.")
-
-     multiscale04 = meta04.multiscales[0]
-     axes = [Axis(name=axis.name, unit=axis.unit) for axis in multiscale04.axes]
-     fractal_datasets = _transform_dataset(
-         datasets04=multiscale04.datasets,
-         axes=axes,
-         coo_transformation04=multiscale04.coordinateTransformations,
-     )
-
-     if meta_mode == "label":
-         return LabelMeta(
-             version="0.4",
-             name=multiscale04.name,
-             datasets=fractal_datasets,
-         )
-
-     if meta04.omero is not None:
-         fractal_omero = vanilla_omero_v04_to_fractal(omero04=meta04.omero)
-     else:
-         fractal_omero = None
-
-     return ImageMeta(
-         version="0.4",
-         name=multiscale04.name,
-         datasets=fractal_datasets,
-         omero=fractal_omero,
-     )
-
-
- def fractal_ngff_image_meta_to_vanilla_v04(
-     meta: ImageLabelMeta,
- ) -> NgffImageMeta04:
-     """Convert the ImageMeta to NgffImageMeta."""
-     axes04 = [Axis04(**axis.model_dump()) for axis in meta.axes]
-     dataset04 = []
-     for dataset in meta.datasets:
-         transformations = [
-             ScaleCoordinateTransformation04(type="scale", scale=dataset.scale)
-         ]
-         if dataset.translation is not None:
-             transformations.append(
-                 TranslationCoordinateTransformation04(
-                     type="translation", translation=dataset.translation
-                 )
-             )
-         dataset04.append(
-             Dataset04(path=dataset.path, coordinateTransformations=transformations)
-         )
-     multiscale04 = Multiscale04(
-         name=meta.name,
-         axes=axes04,
-         datasets=dataset04,
-         version="0.4",
-     )
-
-     if isinstance(meta, LabelMeta):
-         return NgffImageMeta04(multiscales=[multiscale04])
-
-     if meta.omero is not None:
-         omero04 = fractal_omero_to_vanilla_v04(meta.omero)
-     else:
-         omero04 = None
-
-     return NgffImageMeta04(
-         multiscales=[multiscale04],
-         omero=omero04,
-     )
-
-
- def load_ngff_image_meta_v04(
-     group: Group, meta_mode: Literal["image", "label"]
- ) -> ImageLabelMeta:
-     """Load the OME-NGFF 0.4 image meta model."""
-     if not check_ngff_image_meta_v04(store=group):
-         raise ValueError(
-             "The Zarr store does not contain the correct OME-Zarr version."
-         )
-     meta04 = load_vanilla_ngff_image_meta_v04(group=group)
-     return vanilla_ngff_image_meta_v04_to_fractal(meta04=meta04, meta_mode=meta_mode)
-
-
- def write_ngff_image_meta_v04(group: Group, meta: ImageLabelMeta) -> None:
-     """Write the OME-NGFF 0.4 image meta model."""
-     if dict(group.attrs):
-         # If group is not empty, check if the version is correct
-         if not check_ngff_image_meta_v04(store=group):
-             raise ValueError(
-                 "The Zarr store does not contain the correct OME-Zarr version."
-             )
-     meta04 = fractal_ngff_image_meta_to_vanilla_v04(meta=meta)
-     group.attrs.update(meta04.model_dump(exclude_none=True))
-
-
- class NgffImageMetaZarrHandlerV04:
-     """Class for loading and writing OME-NGFF 0.4 metadata."""
-
-     def __init__(
-         self,
-         store: StoreOrGroup,
-         meta_mode: Literal["image", "label"],
-         cache: bool = False,
-         mode: AccessModeLiteral = "a",
-     ):
-         """Initialize the handler.
-
-         Args:
-             store (StoreOrGroup): The Zarr store or group containing the image data.
-             meta_mode (str): The mode of the metadata handler.
-             cache (bool): Whether to cache the metadata.
-             mode (str): The mode of the store.
-         """
-         if isinstance(store, Group):
-             if hasattr(store, "store_path"):
-                 self._store = store.store_path
-             else:
-                 self._store = store.store
-
-             self._group = store
-
-         else:
-             self._store = store
-             self._group = open_group_wrapper(store=store, mode=mode, zarr_format=2)
-
-         self.meta_mode = meta_mode
-         self.cache = cache
-         self._meta: None | ImageLabelMeta = None
-
-     @property
-     def zarr_version(self) -> int:
-         """Return the Zarr version.
-
-         This is not strictly necessary, but it is necessary
-         to make sure the zarr python creare consistent zarr files.
-         """
-         return 2
-
-     @property
-     def store(self) -> StoreOrGroup:
-         """Return the Zarr store."""
-         return self._store
-
-     @property
-     def group(self) -> Group:
-         """Return the Zarr group."""
-         return self._group
-
-     @staticmethod
-     def check_version(store: StoreOrGroup) -> bool:
-         """Check if the version of the metadata is supported."""
-         return check_ngff_image_meta_v04(store=store)
-
-     def load_meta(self) -> ImageLabelMeta:
-         """Load the OME-NGFF 0.4 metadata."""
-         if not self.check_version(store=self.group):
-             raise ValueError(
-                 "The Zarr store does not contain the correct OME-Zarr version."
-             )
-
-         if self.cache:
-             if self._meta is None:
-                 self._meta = load_ngff_image_meta_v04(
-                     self.group, meta_mode=self.meta_mode
-                 )
-             return self._meta
-
-         return load_ngff_image_meta_v04(self.group, meta_mode=self.meta_mode)
-
-     def write_meta(self, meta: ImageLabelMeta) -> None:
-         """Write the OME-NGFF 0.4 metadata."""
-         if _is_group_readonly(self.group):
-             raise ValueError(
-                 "The store is read-only. Cannot write the metadata to the store."
-             )
-
-         write_ngff_image_meta_v04(group=self.group, meta=meta)
-
-         if self.cache:
-             self.update_cache(meta)
-
-     def update_cache(self, meta: ImageLabelMeta) -> None:
-         """Update the cached metadata."""
-         if not self.cache:
-             raise ValueError("Cache is not enabled.")
-         self._meta = meta
-
-     def clear_cache(self) -> None:
-         """Clear the cached metadata."""
-         self._meta = None
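For context on the removal above: a minimal, hypothetical usage sketch of the deleted `NgffImageMetaZarrHandlerV04` as it worked in ngio 0.1.6 (the store path is a placeholder). In 0.2.0a1 this role appears to move to the new `ngio/ome_zarr_meta` modules listed in the file table.

```python
# Hedged sketch against ngio 0.1.6 only; "plate.zarr/B/03/0" is a placeholder path.
from ngio.ngff_meta.v04.zarr_utils import NgffImageMetaZarrHandlerV04

handler = NgffImageMetaZarrHandlerV04(
    store="plate.zarr/B/03/0",  # hypothetical OME-Zarr image group
    meta_mode="image",
    cache=True,
)

meta = handler.load_meta()   # parse the v0.4 multiscales/omero attributes into ImageMeta
handler.write_meta(meta)     # round-trip the metadata back onto the group attributes
handler.clear_cache()        # drop the cached ImageMeta
```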
ngio/pipes/__init__.py DELETED
@@ -1,7 +0,0 @@
- """A module to handle data transforms for image data."""
-
- from ngio.pipes._slicer_transforms import NaiveSlicer, RoiSlicer
- from ngio.pipes._zoom_utils import on_disk_zoom
- from ngio.pipes.data_pipe import DataTransformPipe
-
- __all__ = ["DataTransformPipe", "NaiveSlicer", "RoiSlicer", "on_disk_zoom"]
@@ -1,176 +0,0 @@
- from typing import Protocol # noqa: I001
-
- import dask.delayed
- import numpy as np
- from dask import array as da
- import dask
- from dask.delayed import Delayed
-
- from ngio.utils._common_types import ArrayLike
- from ngio.core.roi import RasterCooROI
- import zarr
-
-
- class SlicerTransform(Protocol):
-     """A special class of transform that load a specific slice of the data."""
-
-     def get(self, data: ArrayLike) -> ArrayLike:
-         """Select a slice of the data and return the result."""
-         ...
-
-     def set(
-         self,
-         data: ArrayLike,
-         patch: ArrayLike,
-     ) -> None:
-         """Replace the slice of the data with the patch and return the result."""
-         ...
-
-
- @dask.delayed
- def _slice_set_delayed(
-     data: zarr.Array,
-     patch: Delayed,
-     slices: tuple[slice, ...],
-     axes_order: list[int] | None,
- ) -> None:
-     if axes_order is not None:
-         patch = da.transpose(patch, axes_order)
-
-     if isinstance(patch, Delayed):
-         shape = tuple([s.stop - s.start for s in slices])
-         patch = da.from_delayed(patch, shape=shape, dtype=data.dtype)
-     da.to_zarr(arr=patch, url=data, region=slices)
-
-
- class NaiveSlicer:
-     """A simple slicer that requires all axes to be specified."""
-
-     def __init__(
-         self,
-         on_disk_axes_name: list[str],
-         axes_order: list[int],
-         t: int | slice | None = None,
-         c: int | slice | None = None,
-         z: int | slice | None = None,
-         y: int | slice | None = None,
-         x: int | slice | None = None,
-         preserve_dimensions: bool = True,
-     ):
-         """Initialize the NaiveSlicer object."""
-         self.on_disk_axes_name = on_disk_axes_name
-
-         # Check if axes_order is trivial
-         if axes_order != list(range(len(axes_order))):
-             self.axes_order = axes_order
-         else:
-             self.axes_order = None
-
-         self.slices = {
-             "t": self._parse_input(t, preserve_dimensions),
-             "c": self._parse_input(c, preserve_dimensions),
-             "z": self._parse_input(z, preserve_dimensions),
-             "y": self._parse_input(y, preserve_dimensions),
-             "x": self._parse_input(x, preserve_dimensions),
-         }
-
-         self.slice_on_disk_order = tuple(
-             [self.slices[axis] for axis in self.on_disk_axes_name]
-         )
-
-     def __repr__(self) -> str:
-         """Return the string representation of the object."""
-         slices = ", ".join([f"{axis}={slice_}" for axis, slice_ in self.slices.items()])
-         return f"NaiveSlicer({slices})"
-
-     def _parse_input(
-         self, x: int | slice | None, preserve_dimensions: bool = True
-     ) -> slice:
-         """Parse the input."""
-         if x is None:
-             return slice(None)
-         elif isinstance(x, int):
-             if preserve_dimensions:
-                 return slice(x, x + 1)
-             else:
-                 return x
-         elif isinstance(x, slice):
-             return x
-
-         raise ValueError(f"Invalid slice definition {x} of type {type(x)}")
-
-     def _shape_from_slices(self) -> tuple[int, ...]:
-         """Return the shape of the slice."""
-         slices = self.slice_on_disk_order
-         return tuple([s.stop - s.start for s in slices])
-
-     def get(self, data: ArrayLike) -> ArrayLike:
-         """Select a slice of the data and return the result."""
-         patch = data[self.slice_on_disk_order]
-
-         # If sel.axis_order is trivial, skip the transpose
-         if self.axes_order is None:
-             return patch
-
-         if isinstance(patch, np.ndarray):
-             patch = np.transpose(patch, self.axes_order)
-         elif isinstance(patch, da.core.Array):
-             patch = da.transpose(patch, self.axes_order)
-         else:
-             raise ValueError(
-                 f"Invalid patch type {type(patch)}, "
-                 "supported types are np.ndarray and da.core.Array"
-             )
-         return patch
-
-     def set(self, data: ArrayLike, patch: ArrayLike) -> None:
-         """Replace the slice of the data with the patch and return the result."""
-         # If sel.axis_order is trivial, skip the transpose
-         if isinstance(patch, np.ndarray):
-             if self.axes_order is not None:
-                 patch = np.transpose(patch, self.axes_order)
-             data[self.slice_on_disk_order] = patch
-         elif isinstance(patch, (da.core.Array, Delayed)): # noqa: UP038
-             if self.axes_order is not None:
-                 patch = da.transpose(patch, self.axes_order)
-
-             if isinstance(patch, Delayed):
-                 patch = da.from_delayed(
-                     patch, shape=self._shape_from_slices(), dtype=data.dtype
-                 )
-             da.to_zarr(arr=patch, url=data, region=self.slice_on_disk_order)
-         else:
-             raise ValueError(
-                 f"Invalid patch type {type(patch)}, "
-                 "supported types are np.ndarray and da.core.Array"
-             )
-
-
- class RoiSlicer(NaiveSlicer):
-     """A slicer that requires all axes to be specified."""
-
-     def __init__(
-         self,
-         on_disk_axes_name: list[str],
-         axes_order: list[int],
-         roi: RasterCooROI,
-         t: int | slice | None = None,
-         c: int | slice | None = None,
-         preserve_dimensions: bool = True,
-     ):
-         """Initialize the RoiSlicer object."""
-         super().__init__(
-             on_disk_axes_name=on_disk_axes_name,
-             axes_order=axes_order,
-             t=t,
-             c=c,
-             z=roi.z_slice(),
-             y=roi.y_slice(),
-             x=roi.x_slice(),
-             preserve_dimensions=preserve_dimensions,
-         )
-
-     def __repr__(self) -> str:
-         """Return the string representation of the object."""
-         slices = ", ".join([f"{axis}={slice_}" for axis, slice_ in self.slices.items()])
-         return f"RoiSlicer({slices})"
ngio/pipes/_transforms.py DELETED
@@ -1,33 +0,0 @@
- from typing import Protocol
-
- from scipy.ndimage import zoom
-
- from ngio.utils._common_types import ArrayLike
-
-
- class Transform(Protocol):
-     """A protocol for data transforms to be performed on image data."""
-
-     def get(self, data: ArrayLike) -> ArrayLike:
-         """Apply the transform to the data and return the result."""
-         ...
-
-     def set(self, data: ArrayLike) -> ArrayLike:
-         """Apply the reverse transform to the data and return the result."""
-         ...
-
-
- class ZoomTransform:
-     """A transform to zoom in or out of the data."""
-
-     def __init__(self, zoom_factor: list[float]):
-         """Initialize the ZoomTransform object."""
-         self.zoom_factor = zoom_factor
-
-     def get(self, data: ArrayLike) -> ArrayLike:
-         """Apply the zoom transform to the data and return the result."""
-         return zoom(data, self.zoom_factor)
-
-     def set(self, data: ArrayLike) -> ArrayLike:
-         """Apply the reverse zoom transform to the data and return the result."""
-         return zoom(data, [1 / factor for factor in self.zoom_factor])
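A tiny round trip with the deleted `ZoomTransform`, which wraps `scipy.ndimage.zoom`; again a sketch assuming ngio 0.1.6, with an illustrative input array.

```python
# Hedged sketch, ngio 0.1.6 API; the input array is illustrative.
import numpy as np

from ngio.pipes._transforms import ZoomTransform

zoomer = ZoomTransform(zoom_factor=[2.0, 2.0])
up = zoomer.get(np.ones((16, 16)))  # get() zooms in  -> shape (32, 32)
down = zoomer.set(up)               # set() zooms out -> shape (16, 16)
```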
ngio/pipes/data_pipe.py DELETED
@@ -1,52 +0,0 @@
- """A module to handle data transforms for image data."""
-
- from ngio.pipes._slicer_transforms import SlicerTransform
- from ngio.pipes._transforms import Transform
- from ngio.utils._common_types import ArrayLike
-
-
- class DataTransformPipe:
-     """A class to handle a pipeline of data transforms.
-
-     For example, a pipeline of transforms can be:
-         - Select a subset of the data
-         - Shuffle the axes of the data
-         - Normalize the data
-
-     All these in reverse order will be applied to the data when setting a patch.
-
-     """
-
-     def __init__(self, slicer: SlicerTransform, *data_transforms: Transform):
-         """Initialize the DataLoadPipe object.
-
-         Args:
-             slicer (SlicerTransform): The first transform to be applied to the
-                 data MUST be a slicer.
-             *data_transforms (Transform): A list of transforms to be
-                 applied to the data in order.
-         """
-         self.slicer = slicer
-         self.list_of_transforms = data_transforms
-
-     def __repr__(self) -> str:
-         """Return the string representation of the object."""
-         list_transforms = ", ".join(
-             [str(transform) for transform in self.list_of_transforms]
-         )
-         return f"DataTransformPipe(slicer={self.slicer}, transforms={list_transforms})"
-
-     def get(self, data: ArrayLike) -> ArrayLike:
-         """Apply all the transforms to the data and return the result."""
-         data = self.slicer.get(data)
-         for transform in self.list_of_transforms:
-             data = transform.get(data)
-         return data
-
-     def set(self, data: ArrayLike, patch: ArrayLike) -> None:
-         """Apply all the reverse transforms to the data and return the result."""
-         for transform in reversed(self.list_of_transforms):
-             patch = transform.set(patch)
-
-         # Write the patch to the data and save it
-         self.slicer.set(data, patch)
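Putting the deleted pieces together: a hedged sketch of a `DataTransformPipe` composed of a `NaiveSlicer` and a `ZoomTransform`, following the pipeline described in the class docstring above. It assumes ngio 0.1.6; the data shape and zoom factors are illustrative.

```python
# Hedged sketch, ngio 0.1.6 API; shape and zoom factors are illustrative.
import numpy as np

from ngio.pipes import DataTransformPipe, NaiveSlicer
from ngio.pipes._transforms import ZoomTransform

data = np.random.rand(1, 4, 64, 64)  # on-disk axis order: c, z, y, x

pipe = DataTransformPipe(
    NaiveSlicer(
        on_disk_axes_name=["c", "z", "y", "x"],
        axes_order=[0, 1, 2, 3],
        c=0,
        z=slice(0, 4),
        y=slice(0, 64),
        x=slice(0, 64),
    ),
    ZoomTransform(zoom_factor=[1, 1, 2, 2]),  # 2x up-sampling in y and x
)

patch = pipe.get(data)   # slice, then zoom: shape (1, 4, 128, 128)
pipe.set(data, patch)    # zoom back down, then write the patch into `data`
```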