ngio 0.1.5__py3-none-any.whl → 0.2.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. ngio/__init__.py +31 -5
  2. ngio/common/__init__.py +44 -0
  3. ngio/common/_array_pipe.py +160 -0
  4. ngio/common/_axes_transforms.py +63 -0
  5. ngio/common/_common_types.py +5 -0
  6. ngio/common/_dimensions.py +113 -0
  7. ngio/common/_pyramid.py +222 -0
  8. ngio/{core/roi.py → common/_roi.py} +22 -23
  9. ngio/common/_slicer.py +97 -0
  10. ngio/{pipes/_zoom_utils.py → common/_zoom.py} +46 -80
  11. ngio/hcs/__init__.py +60 -0
  12. ngio/images/__init__.py +23 -0
  13. ngio/images/abstract_image.py +240 -0
  14. ngio/images/create.py +251 -0
  15. ngio/images/image.py +383 -0
  16. ngio/images/label.py +96 -0
  17. ngio/images/omezarr_container.py +512 -0
  18. ngio/ome_zarr_meta/__init__.py +35 -0
  19. ngio/ome_zarr_meta/_generic_handlers.py +320 -0
  20. ngio/ome_zarr_meta/_meta_handlers.py +142 -0
  21. ngio/ome_zarr_meta/ngio_specs/__init__.py +63 -0
  22. ngio/ome_zarr_meta/ngio_specs/_axes.py +481 -0
  23. ngio/ome_zarr_meta/ngio_specs/_channels.py +378 -0
  24. ngio/ome_zarr_meta/ngio_specs/_dataset.py +134 -0
  25. ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py +5 -0
  26. ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +434 -0
  27. ngio/ome_zarr_meta/ngio_specs/_pixel_size.py +84 -0
  28. ngio/ome_zarr_meta/v04/__init__.py +11 -0
  29. ngio/ome_zarr_meta/v04/_meta_handlers.py +54 -0
  30. ngio/ome_zarr_meta/v04/_v04_spec_utils.py +412 -0
  31. ngio/tables/__init__.py +21 -5
  32. ngio/tables/_validators.py +192 -0
  33. ngio/tables/backends/__init__.py +8 -0
  34. ngio/tables/backends/_abstract_backend.py +71 -0
  35. ngio/tables/backends/_anndata_utils.py +194 -0
  36. ngio/tables/backends/_anndata_v1.py +75 -0
  37. ngio/tables/backends/_json_v1.py +56 -0
  38. ngio/tables/backends/_table_backends.py +102 -0
  39. ngio/tables/tables_container.py +300 -0
  40. ngio/tables/v1/__init__.py +6 -5
  41. ngio/tables/v1/_feature_table.py +161 -0
  42. ngio/tables/v1/_generic_table.py +99 -182
  43. ngio/tables/v1/_masking_roi_table.py +175 -0
  44. ngio/tables/v1/_roi_table.py +226 -0
  45. ngio/utils/__init__.py +23 -10
  46. ngio/utils/_datasets.py +51 -0
  47. ngio/utils/_errors.py +10 -4
  48. ngio/utils/_zarr_utils.py +378 -0
  49. {ngio-0.1.5.dist-info → ngio-0.2.0a1.dist-info}/METADATA +19 -39
  50. ngio-0.2.0a1.dist-info/RECORD +53 -0
  51. ngio/core/__init__.py +0 -7
  52. ngio/core/dimensions.py +0 -122
  53. ngio/core/image_handler.py +0 -228
  54. ngio/core/image_like_handler.py +0 -549
  55. ngio/core/label_handler.py +0 -410
  56. ngio/core/ngff_image.py +0 -387
  57. ngio/core/utils.py +0 -287
  58. ngio/io/__init__.py +0 -19
  59. ngio/io/_zarr.py +0 -88
  60. ngio/io/_zarr_array_utils.py +0 -0
  61. ngio/io/_zarr_group_utils.py +0 -61
  62. ngio/iterators/__init__.py +0 -1
  63. ngio/ngff_meta/__init__.py +0 -27
  64. ngio/ngff_meta/fractal_image_meta.py +0 -1267
  65. ngio/ngff_meta/meta_handler.py +0 -92
  66. ngio/ngff_meta/utils.py +0 -235
  67. ngio/ngff_meta/v04/__init__.py +0 -6
  68. ngio/ngff_meta/v04/specs.py +0 -158
  69. ngio/ngff_meta/v04/zarr_utils.py +0 -376
  70. ngio/pipes/__init__.py +0 -7
  71. ngio/pipes/_slicer_transforms.py +0 -176
  72. ngio/pipes/_transforms.py +0 -33
  73. ngio/pipes/data_pipe.py +0 -52
  74. ngio/tables/_ad_reader.py +0 -80
  75. ngio/tables/_utils.py +0 -301
  76. ngio/tables/tables_group.py +0 -252
  77. ngio/tables/v1/feature_tables.py +0 -182
  78. ngio/tables/v1/masking_roi_tables.py +0 -243
  79. ngio/tables/v1/roi_tables.py +0 -285
  80. ngio/utils/_common_types.py +0 -5
  81. ngio/utils/_pydantic_utils.py +0 -52
  82. ngio-0.1.5.dist-info/RECORD +0 -44
  83. {ngio-0.1.5.dist-info → ngio-0.2.0a1.dist-info}/WHEEL +0 -0
  84. {ngio-0.1.5.dist-info → ngio-0.2.0a1.dist-info}/licenses/LICENSE +0 -0
ngio/__init__.py CHANGED
@@ -2,14 +2,40 @@
2
2
 
3
3
  from importlib.metadata import PackageNotFoundError, version
4
4
 
5
- from ngio.core import Image, Label, NgffImage
6
-
7
- __all__ = ["Image", "Label", "NgffImage"]
8
-
9
-
10
5
  try:
11
6
  __version__ = version("ngio")
12
7
  except PackageNotFoundError: # pragma: no cover
13
8
  __version__ = "uninstalled"
14
9
  __author__ = "Lorenzo Cerrone"
15
10
  __email__ = "lorenzo.cerrone@uzh.ch"
11
+
12
+ from ngio.common import ArrayLike, Dimensions
13
+ from ngio.hcs import OmeZarrPlate, OmeZarrWell, open_omezarr_plate, open_omezarr_well
14
+ from ngio.images import (
15
+ Image,
16
+ Label,
17
+ OmeZarrContainer,
18
+ create_empty_omezarr,
19
+ create_omezarr_from_array,
20
+ open_image,
21
+ open_omezarr_container,
22
+ )
23
+ from ngio.ome_zarr_meta.ngio_specs import AxesSetup, PixelSize
24
+
25
+ __all__ = [
26
+ "ArrayLike",
27
+ "AxesSetup",
28
+ "Dimensions",
29
+ "Image",
30
+ "Label",
31
+ "OmeZarrContainer",
32
+ "OmeZarrPlate",
33
+ "OmeZarrWell",
34
+ "PixelSize",
35
+ "create_empty_omezarr",
36
+ "create_omezarr_from_array",
37
+ "open_image",
38
+ "open_omezarr_container",
39
+ "open_omezarr_plate",
40
+ "open_omezarr_well",
41
+ ]
@@ -0,0 +1,44 @@
1
+ """Common classes and functions that are used across the package."""
2
+
3
+ from ngio.common._array_pipe import get_pipe, set_pipe
4
+ from ngio.common._axes_transforms import (
5
+ transform_dask_array,
6
+ transform_list,
7
+ transform_numpy_array,
8
+ )
9
+ from ngio.common._common_types import ArrayLike
10
+ from ngio.common._dimensions import Dimensions
11
+ from ngio.common._pyramid import consolidate_pyramid, init_empty_pyramid, on_disk_zoom
12
+ from ngio.common._roi import RasterCooROI, WorldCooROI
13
+ from ngio.common._slicer import (
14
+ SliceTransform,
15
+ compute_and_slices,
16
+ dask_get_slice,
17
+ dask_set_slice,
18
+ numpy_get_slice,
19
+ numpy_set_slice,
20
+ )
21
+ from ngio.common._zoom import dask_zoom, numpy_zoom
22
+
23
+ __all__ = [
24
+ "ArrayLike",
25
+ "Dimensions",
26
+ "RasterCooROI",
27
+ "SliceTransform",
28
+ "WorldCooROI",
29
+ "compute_and_slices",
30
+ "consolidate_pyramid",
31
+ "dask_get_slice",
32
+ "dask_set_slice",
33
+ "dask_zoom",
34
+ "get_pipe",
35
+ "init_empty_pyramid",
36
+ "numpy_get_slice",
37
+ "numpy_set_slice",
38
+ "numpy_zoom",
39
+ "on_disk_zoom",
40
+ "set_pipe",
41
+ "transform_dask_array",
42
+ "transform_list",
43
+ "transform_numpy_array",
44
+ ]
@@ -0,0 +1,160 @@
1
+ from collections.abc import Collection, Iterable
2
+ from typing import Literal
3
+
4
+ import dask
5
+ import dask.delayed
6
+ import numpy as np
7
+ import zarr
8
+
9
+ from ngio.common._axes_transforms import transform_dask_array, transform_numpy_array
10
+ from ngio.common._common_types import ArrayLike
11
+ from ngio.common._dimensions import Dimensions
12
+ from ngio.common._slicer import (
13
+ SliceTransform,
14
+ compute_and_slices,
15
+ dask_get_slice,
16
+ dask_set_slice,
17
+ numpy_get_slice,
18
+ numpy_set_slice,
19
+ )
20
+ from ngio.ome_zarr_meta.ngio_specs import AxesTransformation
21
+ from ngio.utils import NgioValueError
22
+
23
+
24
def _compute_from_disk_transforms(
    *,
    dimensions: Dimensions,
    axes_order: Collection[str] | None = None,
    **slice_kwargs: slice | int | Iterable[int],
) -> tuple[SliceTransform, tuple[AxesTransformation, ...]]:
    """Build the slicing plus axes transformations for reading from disk.

    Returns the computed ``SliceTransform`` together with the transformations
    that bring the on-disk axes into ``axes_order`` (empty tuple when no
    reordering was requested).
    """
    slice_transform = compute_and_slices(dimensions=dimensions, **slice_kwargs)
    if axes_order is not None:
        return slice_transform, dimensions._axes_mapper.to_order(axes_order)
    return slice_transform, ()
37
+
38
+
39
def _compute_to_disk_transforms(
    *,
    dimensions: Dimensions,
    axes_order: Collection[str] | None = None,
    **slice_kwargs: slice | int | Iterable[int],
) -> tuple[SliceTransform, tuple[AxesTransformation, ...]]:
    """Build the slicing plus axes transformations for writing to disk.

    Returns the computed ``SliceTransform`` together with the transformations
    that bring an array laid out as ``axes_order`` back to the on-disk axes
    (empty tuple when no reordering was requested).
    """
    slice_transform = compute_and_slices(dimensions=dimensions, **slice_kwargs)
    if axes_order is not None:
        return slice_transform, dimensions._axes_mapper.from_order(axes_order)
    return slice_transform, ()
51
+
52
+
53
def _numpy_get_pipe(
    array: zarr.Array,
    slices: SliceTransform,
    transformations: tuple[AxesTransformation, ...],
) -> np.ndarray:
    """Eagerly read a slice from ``array`` and apply the axes transformations."""
    sliced = numpy_get_slice(array, slices)
    return transform_numpy_array(sliced, transformations)
60
+
61
+
62
def _delayed_numpy_get_pipe(
    array: zarr.Array,
    slices: SliceTransform,
    transformations: tuple[AxesTransformation, ...],
) -> dask.delayed:
    """Build (without executing) a delayed eager read + axes transform."""
    delayed_slice = dask.delayed(numpy_get_slice)(array, slices)
    return dask.delayed(transform_numpy_array)(delayed_slice, transformations)
69
+
70
+
71
def _dask_get_pipe(
    array: zarr.Array,
    slices: SliceTransform,
    transformations: tuple[AxesTransformation, ...],
) -> dask.array:
    """Read a lazy dask slice from ``array`` and apply the axes transformations."""
    lazy_slice = dask_get_slice(array, slices)
    return transform_dask_array(lazy_slice, transformations)
78
+
79
+
80
def _numpy_set_pipe(
    array: zarr.Array,
    patch: np.ndarray,
    slices: SliceTransform,
    transformations: tuple[AxesTransformation, ...],
) -> None:
    """Reorder ``patch`` to the on-disk axes and write it eagerly."""
    on_disk_patch = transform_numpy_array(patch, transformations)
    numpy_set_slice(array, on_disk_patch, slices)
88
+
89
+
90
def _dask_set_pipe(
    array: zarr.Array,
    patch: np.ndarray,
    slices: SliceTransform,
    transformations: tuple[AxesTransformation, ...],
) -> None:
    """Reorder a dask ``patch`` to the on-disk axes and write it."""
    on_disk_patch = transform_dask_array(patch, transformations)
    dask_set_slice(array, on_disk_patch, slices)
98
+
99
+
100
def _delayed_numpy_set_pipe(
    array: zarr.Array,
    patch: np.ndarray,
    slices: SliceTransform,
    transformations: tuple[AxesTransformation, ...],
) -> dask.delayed:
    """Build (without executing) a delayed axes transform + write of ``patch``."""
    delayed_patch = dask.delayed(transform_numpy_array)(patch, transformations)
    return dask.delayed(numpy_set_slice)(array, delayed_patch, slices)
108
+
109
+
110
+ def get_pipe(
111
+ array: zarr.Array,
112
+ *,
113
+ dimensions: Dimensions,
114
+ axes_order: Collection[str] | None = None,
115
+ mode: Literal["numpy", "dask", "delayed"] = "numpy",
116
+ **slice_kwargs: slice | int | Iterable[int],
117
+ ):
118
+ slices, transformations = _compute_from_disk_transforms(
119
+ dimensions=dimensions, axes_order=axes_order, **slice_kwargs
120
+ )
121
+ match mode:
122
+ case "numpy":
123
+ return _numpy_get_pipe(array, slices, transformations)
124
+ case "dask":
125
+ return _dask_get_pipe(array, slices, transformations)
126
+
127
+ case "delayed_numpy":
128
+ return _delayed_numpy_get_pipe(array, slices, transformations)
129
+
130
+ case _:
131
+ raise NgioValueError(
132
+ f"Unknown get pipe mode {mode}, expected 'numpy', 'dask' or 'delayed'."
133
+ )
134
+
135
+
136
def set_pipe(
    array: zarr.Array,
    patch: ArrayLike,
    *,
    dimensions: Dimensions,
    axes_order: Collection[str] | None = None,
    **slice_kwargs: slice | int | Iterable[int],
):
    """Write ``patch`` into a region of a zarr array.

    Args:
        array: The on-disk zarr array to write to.
        patch: The data to write (numpy array, dask array, or ``dask.delayed``
            wrapping a numpy array).
        dimensions: Dimension metadata used to resolve ``slice_kwargs``.
        axes_order: Optional axes order the patch is currently laid out in.
        **slice_kwargs: Per-axis slicing (slice, int, or iterable of ints).

    Returns:
        For a delayed patch, the ``dask.delayed`` write (the caller must
        compute it); ``None`` otherwise.

    Raises:
        NgioValueError: If the patch type is not supported.
    """
    # Bug fix: resolve these explicitly. The module-level ``import dask`` /
    # ``import dask.delayed`` do not guarantee that ``dask.array`` is an
    # importable attribute, nor that ``dask.delayed`` (the function) exposes
    # the ``Delayed`` class — both lookups could fail at runtime.
    import dask.array
    from dask.delayed import Delayed

    slices, transformations = _compute_to_disk_transforms(
        dimensions=dimensions, axes_order=axes_order, **slice_kwargs
    )
    if isinstance(patch, dask.array.Array):
        _dask_set_pipe(
            array=array, patch=patch, slices=slices, transformations=transformations
        )
    elif isinstance(patch, np.ndarray):
        _numpy_set_pipe(
            array=array, patch=patch, slices=slices, transformations=transformations
        )
    elif isinstance(patch, Delayed):
        # Bug fix: the delayed write used to be built and then discarded, so
        # it could never be executed. Return it so the caller can compute it.
        return _delayed_numpy_set_pipe(
            array=array, patch=patch, slices=slices, transformations=transformations
        )
    else:
        raise NgioValueError("Unknown patch type, expected numpy, dask or delayed.")
@@ -0,0 +1,63 @@
1
+ from typing import TypeVar
2
+
3
+ import dask.array as da
4
+ import numpy as np
5
+
6
+ from ngio.ome_zarr_meta.ngio_specs._axes import (
7
+ AxesExpand,
8
+ AxesSqueeze,
9
+ AxesTransformation,
10
+ AxesTranspose,
11
+ )
12
+
13
+ T = TypeVar("T")
14
+
15
+
16
def transform_list(
    input_list: list[T], default: T, operations: tuple[AxesTransformation, ...]
) -> list[T]:
    """Apply a sequence of axes transformations to a plain list (e.g. a shape).

    Args:
        input_list: The list to transform (a tuple is accepted as well).
        default: The value inserted for newly expanded axes.
        operations: The transformations to apply, in order.

    Returns:
        A new list; the input is never mutated.
    """
    # Bug fix: work on a copy. The previous implementation called
    # insert()/pop() directly on the caller's list, mutating it in place.
    result = list(input_list)
    for operation in operations:
        # The operation types are mutually exclusive, so a single
        # if/elif chain is used (the original mixed a stray bare `if`).
        if isinstance(operation, AxesTranspose):
            result = [result[i] for i in operation.axes]
        elif isinstance(operation, AxesExpand):
            for ax in operation.axes:
                result.insert(ax, default)
        elif isinstance(operation, AxesSqueeze):
            # Offset compensates for the shrinking list as axes are removed.
            for offset, ax in enumerate(operation.axes):
                result.pop(ax - offset)
    return result
34
+
35
+
36
def transform_numpy_array(
    array: np.ndarray, operations: tuple[AxesTransformation, ...]
) -> np.ndarray:
    """Apply a sequence of axes transformations to a numpy array."""
    for op in operations:
        if isinstance(op, AxesTranspose):
            array = np.transpose(array, op.axes)
            continue
        if isinstance(op, AxesExpand):
            array = np.expand_dims(array, axis=op.axes)
            continue
        if isinstance(op, AxesSqueeze):
            array = np.squeeze(array, axis=op.axes)
            continue
        raise ValueError(f"Unknown operation {op}")
    return array
49
+
50
+
51
def transform_dask_array(
    array: da.Array, operations: tuple[AxesTransformation, ...]
) -> da.Array:
    """Apply a sequence of axes transformations to a dask array."""
    for op in operations:
        if isinstance(op, AxesTranspose):
            array = da.transpose(array, axes=op.axes)
            continue
        if isinstance(op, AxesExpand):
            array = da.expand_dims(array, axis=op.axes)
            continue
        if isinstance(op, AxesSqueeze):
            array = da.squeeze(array, axis=op.axes)
            continue
        raise ValueError(f"Unknown operation {op}")
    return array
@@ -0,0 +1,5 @@
1
+ import numpy as np
2
+ import zarr
3
+ from dask import array as da
4
+
5
# Union of the array flavours ngio accepts: in-memory numpy, lazy dask,
# or an on-disk zarr array.
ArrayLike = np.ndarray | da.core.Array | zarr.Array  # type: ignore
@@ -0,0 +1,113 @@
1
+ """Dimension metadata.
2
+
3
+ This is not related to the NGFF metadata,
4
+ but it is based on the actual metadata of the image data.
5
+ """
6
+
7
+ from collections.abc import Collection
8
+
9
+ from ngio.common._axes_transforms import transform_list
10
+ from ngio.ome_zarr_meta import AxesMapper
11
+ from ngio.utils import NgioValidationError, NgioValueError
12
+
13
+
14
+ class Dimensions:
15
+ """Dimension metadata."""
16
+
17
+ def __init__(
18
+ self,
19
+ shape: tuple[int, ...],
20
+ axes_mapper: AxesMapper,
21
+ ) -> None:
22
+ """Create a Dimension object from a Zarr array.
23
+
24
+ Args:
25
+ shape: The shape of the Zarr array.
26
+ axes_mapper: The axes mapper object.
27
+ """
28
+ self._shape = shape
29
+ self._axes_mapper = axes_mapper
30
+
31
+ if len(self._shape) != len(self._axes_mapper.on_disk_axes):
32
+ raise NgioValidationError(
33
+ "The number of dimensions must match the number of axes. "
34
+ f"Expected Axis {self._axes_mapper.on_disk_axes_names} but got shape "
35
+ f"{self._shape}."
36
+ )
37
+
38
+ def __str__(self) -> str:
39
+ """Return the string representation of the object."""
40
+ dims = ", ".join(
41
+ f"{ax.on_disk_name}: {s}"
42
+ for ax, s in zip(self._axes_mapper.on_disk_axes, self._shape, strict=True)
43
+ )
44
+ return f"Dimensions({dims})"
45
+
46
+ def get(self, axis_name: str, strict: bool = True) -> int:
47
+ """Return the dimension of the given axis name.
48
+
49
+ Args:
50
+ axis_name: The name of the axis (either canonical or non-canonical).
51
+ strict: If True, raise an error if the axis does not exist.
52
+ """
53
+ index = self._axes_mapper.get_index(axis_name)
54
+ if index is None and strict:
55
+ raise NgioValueError(f"Axis {axis_name} does not exist.")
56
+ elif index is None:
57
+ return 1
58
+ return self._shape[index]
59
+
60
+ def get_shape(self, axes_order: Collection[str]) -> tuple[int, ...]:
61
+ """Return the shape in the given axes order."""
62
+ transforms = self._axes_mapper.to_order(axes_order)
63
+ return tuple(transform_list(list(self._shape), 1, transforms))
64
+
65
+ def get_canonical_shape(self) -> tuple[int, ...]:
66
+ """Return the shape in the canonical order."""
67
+ transforms = self._axes_mapper.to_canonical()
68
+ return tuple(transform_list(list(self._shape), 1, transforms))
69
+
70
+ def __repr__(self) -> str:
71
+ """Return the string representation of the object."""
72
+ return str(self)
73
+
74
+ @property
75
+ def on_disk_shape(self) -> tuple[int, ...]:
76
+ """Return the shape as a tuple."""
77
+ return tuple(self._shape)
78
+
79
+ @property
80
+ def is_time_series(self) -> bool:
81
+ """Return whether the data is a time series."""
82
+ if self.get("t", strict=False) == 1:
83
+ return False
84
+ return True
85
+
86
+ @property
87
+ def is_2d(self) -> bool:
88
+ """Return whether the data is 2D."""
89
+ if self.get("z", strict=False) != 1:
90
+ return False
91
+ return True
92
+
93
+ @property
94
+ def is_2d_time_series(self) -> bool:
95
+ """Return whether the data is a 2D time series."""
96
+ return self.is_2d and self.is_time_series
97
+
98
+ @property
99
+ def is_3d(self) -> bool:
100
+ """Return whether the data is 3D."""
101
+ return not self.is_2d
102
+
103
+ @property
104
+ def is_3d_time_series(self) -> bool:
105
+ """Return whether the data is a 3D time series."""
106
+ return self.is_3d and self.is_time_series
107
+
108
+ @property
109
+ def is_multi_channels(self) -> bool:
110
+ """Return whether the data has multiple channels."""
111
+ if self.get("c", strict=False) == 1:
112
+ return False
113
+ return True
@@ -0,0 +1,222 @@
1
+ import math
2
+ from collections.abc import Collection
3
+ from typing import Literal
4
+
5
+ import dask.array as da
6
+ import numpy as np
7
+ import zarr
8
+
9
+ from ngio.common._zoom import _zoom_inputs_check, dask_zoom, numpy_zoom
10
+ from ngio.utils import AccessModeLiteral, StoreOrGroup, open_group_wrapper
11
+
12
+
13
def _on_disk_numpy_zoom(
    source: zarr.Array,
    target: zarr.Array,
    order: Literal[0, 1, 2] = 1,
) -> None:
    """Zoom ``source`` into ``target`` fully in memory with numpy."""
    resized = numpy_zoom(source[...], target_shape=target.shape, order=order)
    target[...] = resized
19
+
20
+
21
def _on_disk_dask_zoom(
    source: zarr.Array,
    target: zarr.Array,
    order: Literal[0, 1, 2] = 1,
) -> None:
    """Zoom ``source`` into ``target`` lazily via dask."""
    zoomed = dask_zoom(da.from_zarr(source), target_shape=target.shape, order=order)
    zoomed = zoomed.rechunk(target.chunks)
    zoomed.compute_chunk_sizes()
    zoomed.to_zarr(target)
32
+
33
+
34
def _on_disk_coarsen(
    source: zarr.Array,
    target: zarr.Array,
    _order: Literal[0, 1] = 1,
    aggregation_function: np.ufunc | None = None,
) -> None:
    """Apply a coarsening operation from a source zarr array to a target zarr array.

    Args:
        source (zarr.Array): The source array to coarsen.
        target (zarr.Array): The target array to save the coarsened result to.
        _order (Literal[0, 1]): The order of interpolation is not really implemented
            for coarsening, but it is kept for compatibility with the zoom function.
            _order=1 -> linear interpolation ~ np.mean
            _order=0 -> nearest interpolation ~ np.max
        aggregation_function (np.ufunc): The aggregation function to use.

    Raises:
        ValueError: If the computed target shape does not match ``target``, no
            aggregation function can be derived for ``_order``, or a coarsening
            factor is not an integer.
    """
    source_array = da.from_zarr(source)

    _scale, _target_shape = _zoom_inputs_check(
        source_array=source_array, scale=None, target_shape=target.shape
    )

    # Bug fix: validate with an explicit raise instead of ``assert`` — asserts
    # are stripped when Python runs with -O, silently skipping this check.
    if _target_shape != target.shape:
        raise ValueError("Target shape must match the target array shape")

    if aggregation_function is None:
        if _order == 1:
            aggregation_function = np.mean
        elif _order == 0:
            aggregation_function = np.max
        else:
            raise ValueError(
                f"Aggregation function must be provided for order {_order}"
            )

    coarsening_setup = {}
    for i, s in enumerate(_scale):
        factor = 1 / s
        # This check is very strict, but it is necessary to avoid
        # a few pixels shift in the coarsening
        # We could add a tolerance
        if factor.is_integer():
            coarsening_setup[i] = int(factor)
        else:
            raise ValueError(
                f"Coarsening factor must be an integer, got {factor} on axis {i}"
            )

    out_target = da.coarsen(
        aggregation_function, source_array, coarsening_setup, trim_excess=True
    )
    out_target = out_target.rechunk(target.chunks)
    out_target.to_zarr(target)
89
+
90
+
91
def on_disk_zoom(
    source: zarr.Array,
    target: zarr.Array,
    order: Literal[0, 1, 2] = 1,
    mode: Literal["dask", "numpy", "coarsen"] = "dask",
) -> None:
    """Apply a zoom operation from a source zarr array to a target zarr array.

    Args:
        source (zarr.Array): The source array to zoom.
        target (zarr.Array): The target array to save the zoomed result to.
        order (Literal[0, 1, 2]): The order of interpolation. Defaults to 1.
        mode (Literal["dask", "numpy", "coarsen"]): The mode to use. Defaults to "dask".
    """
    if not isinstance(source, zarr.Array):
        raise ValueError("source must be a zarr array")
    if not isinstance(target, zarr.Array):
        raise ValueError("target must be a zarr array")
    if source.dtype != target.dtype:
        raise ValueError("source and target must have the same dtype")

    if mode == "numpy":
        return _on_disk_numpy_zoom(source, target, order)
    if mode == "dask":
        return _on_disk_dask_zoom(source, target, order)
    if mode == "coarsen":
        return _on_disk_coarsen(source, target)
    raise ValueError("mode must be either 'dask', 'numpy' or 'coarsen'")
126
+
127
+
128
+ def _find_closest_arrays(
129
+ processed: list[zarr.Array], to_be_processed: list[zarr.Array]
130
+ ) -> tuple[int, int]:
131
+ dist_matrix = np.zeros((len(processed), len(to_be_processed)))
132
+ for i, arr_to_proc in enumerate(to_be_processed):
133
+ for j, proc_arr in enumerate(processed):
134
+ dist_matrix[j, i] = np.sqrt(
135
+ np.sum(
136
+ [
137
+ (s1 - s2) ** 2
138
+ for s1, s2 in zip(
139
+ arr_to_proc.shape, proc_arr.shape, strict=False
140
+ )
141
+ ]
142
+ )
143
+ )
144
+
145
+ return np.unravel_index(dist_matrix.argmin(), dist_matrix.shape)
146
+
147
+
148
def consolidate_pyramid(
    source: zarr.Array,
    targets: list[zarr.Array],
    order: Literal[0, 1, 2] = 1,
    mode: Literal["dask", "numpy", "coarsen"] = "dask",
) -> None:
    """Consolidate the Zarr array."""
    done = [source]
    pending = targets

    # Repeatedly pick the target whose shape is closest to an already
    # consolidated level, so each level is zoomed from its nearest neighbor.
    while pending:
        src_idx, tgt_idx = _find_closest_arrays(done, pending)
        next_target = pending.pop(tgt_idx)
        on_disk_zoom(
            source=done[src_idx],
            target=next_target,
            mode=mode,
            order=order,
        )
        done.append(next_target)
171
+
172
+
173
def init_empty_pyramid(
    store: StoreOrGroup,
    paths: list[str],
    ref_shape: Collection[int],
    scaling_factors: Collection[float],
    chunks: Collection[int] | None = None,
    dtype: str = "uint16",
    mode: AccessModeLiteral = "a",
) -> None:
    """Create one zero-filled zarr array per pyramid level.

    Args:
        store: Store or group where the pyramid levels are created.
        paths: Names of the pyramid levels, highest resolution first.
        ref_shape: Shape of the highest-resolution level.
        scaling_factors: Per-axis factor the shape is divided by between
            consecutive levels.
        chunks: Optional chunk shape; after the first level it is clipped so
            it never exceeds the (smaller) level shape.
        dtype: Data type of the created arrays.
        mode: Access mode used to open ``store``.

    Raises:
        ValueError: If ``chunks``/``scaling_factors`` lengths do not match
            ``ref_shape``, or a computed level shape drops below 1.
    """
    if chunks is not None and len(chunks) != len(ref_shape):
        raise ValueError(
            "The shape and chunks must have the same number of dimensions."
        )

    if len(ref_shape) != len(scaling_factors):
        raise ValueError(
            "The shape and scaling factor must have the same number of dimensions."
        )

    root_group, _ = open_group_wrapper(store, mode=mode)
    for path in paths:
        if any(s < 1 for s in ref_shape):
            raise ValueError(
                "Level shape must be at least 1 on all dimensions. "
                f"Calculated shape: {ref_shape} at level {path}."
            )
        new_arr = root_group.zeros(
            name=path,
            shape=ref_shape,
            dtype=dtype,
            chunks=chunks,
            dimension_separator="/",
        )

        # Todo redo this with when a proper build of pyramid is implemented
        # Compute the next (coarser) level shape: keep floor(s / sc) when it
        # is even, otherwise round up.
        # NOTE(review): the even/odd rule looks intended to keep level shapes
        # friendly for further downscaling — confirm before relying on it.
        _shape = []
        for s, sc in zip(ref_shape, scaling_factors, strict=True):
            if math.floor(s / sc) % 2 == 0:
                _shape.append(math.floor(s / sc))
            else:
                _shape.append(math.ceil(s / sc))
        ref_shape = _shape

        # Inherit chunks from the first created array if none were given,
        # then clip them so they never exceed the next level's shape.
        if chunks is None:
            chunks = new_arr.chunks
        if chunks is None:
            raise ValueError("Something went wrong with the chunks")
        chunks = [min(c, s) for c, s in zip(chunks, ref_shape, strict=True)]
    return None