ngio 0.4.8__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. ngio/__init__.py +5 -2
  2. ngio/common/__init__.py +11 -6
  3. ngio/common/_masking_roi.py +34 -54
  4. ngio/common/_pyramid.py +322 -75
  5. ngio/common/_roi.py +258 -330
  6. ngio/experimental/iterators/_feature.py +3 -3
  7. ngio/experimental/iterators/_rois_utils.py +10 -11
  8. ngio/hcs/_plate.py +192 -136
  9. ngio/images/_abstract_image.py +539 -35
  10. ngio/images/_create_synt_container.py +45 -47
  11. ngio/images/_create_utils.py +406 -0
  12. ngio/images/_image.py +524 -248
  13. ngio/images/_label.py +257 -180
  14. ngio/images/_masked_image.py +2 -2
  15. ngio/images/_ome_zarr_container.py +658 -255
  16. ngio/io_pipes/_io_pipes.py +9 -9
  17. ngio/io_pipes/_io_pipes_masked.py +7 -7
  18. ngio/io_pipes/_io_pipes_roi.py +6 -6
  19. ngio/io_pipes/_io_pipes_types.py +3 -3
  20. ngio/io_pipes/_match_shape.py +6 -8
  21. ngio/io_pipes/_ops_slices_utils.py +8 -5
  22. ngio/ome_zarr_meta/__init__.py +29 -18
  23. ngio/ome_zarr_meta/_meta_handlers.py +402 -689
  24. ngio/ome_zarr_meta/ngio_specs/__init__.py +4 -0
  25. ngio/ome_zarr_meta/ngio_specs/_axes.py +152 -51
  26. ngio/ome_zarr_meta/ngio_specs/_dataset.py +13 -22
  27. ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py +129 -91
  28. ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +69 -69
  29. ngio/ome_zarr_meta/v04/__init__.py +5 -1
  30. ngio/ome_zarr_meta/v04/{_v04_spec_utils.py → _v04_spec.py} +55 -86
  31. ngio/ome_zarr_meta/v05/__init__.py +27 -0
  32. ngio/ome_zarr_meta/v05/_custom_models.py +18 -0
  33. ngio/ome_zarr_meta/v05/_v05_spec.py +495 -0
  34. ngio/resources/__init__.py +1 -1
  35. ngio/resources/resource_model.py +1 -1
  36. ngio/tables/_tables_container.py +82 -24
  37. ngio/tables/backends/_abstract_backend.py +7 -0
  38. ngio/tables/backends/_anndata.py +60 -7
  39. ngio/tables/backends/_anndata_utils.py +2 -4
  40. ngio/tables/backends/_csv.py +3 -19
  41. ngio/tables/backends/_json.py +10 -13
  42. ngio/tables/backends/_parquet.py +3 -31
  43. ngio/tables/backends/_py_arrow_backends.py +222 -0
  44. ngio/tables/backends/_utils.py +1 -1
  45. ngio/tables/v1/_roi_table.py +41 -24
  46. ngio/utils/__init__.py +8 -12
  47. ngio/utils/_cache.py +48 -0
  48. ngio/utils/_zarr_utils.py +354 -236
  49. {ngio-0.4.8.dist-info → ngio-0.5.0.dist-info}/METADATA +12 -5
  50. ngio-0.5.0.dist-info/RECORD +88 -0
  51. ngio/images/_create.py +0 -276
  52. ngio/tables/backends/_non_zarr_backends.py +0 -196
  53. ngio/utils/_logger.py +0 -50
  54. ngio-0.4.8.dist-info/RECORD +0 -85
  55. {ngio-0.4.8.dist-info → ngio-0.5.0.dist-info}/WHEEL +0 -0
  56. {ngio-0.4.8.dist-info → ngio-0.5.0.dist-info}/licenses/LICENSE +0 -0
ngio/__init__.py CHANGED
@@ -9,7 +9,7 @@ except PackageNotFoundError:  # pragma: no cover
 __author__ = "Lorenzo Cerrone"
 __email__ = "lorenzo.cerrone@uzh.ch"
 
-from ngio.common import Dimensions, Roi, RoiPixels
+from ngio.common import Dimensions, Roi, RoiSlice
 from ngio.hcs import (
     OmeZarrPlate,
     OmeZarrWell,
@@ -37,6 +37,7 @@ from ngio.ome_zarr_meta.ngio_specs import (
     NgffVersions,
     PixelSize,
 )
+from ngio.utils import NgioSupportedStore, StoreOrGroup
 
 __all__ = [
     "AxesSetup",
@@ -47,12 +48,14 @@ __all__ = [
     "ImageInWellPath",
     "Label",
     "NgffVersions",
+    "NgioSupportedStore",
     "OmeZarrContainer",
     "OmeZarrPlate",
     "OmeZarrWell",
     "PixelSize",
     "Roi",
-    "RoiPixels",
+    "RoiSlice",
+    "StoreOrGroup",
     "create_empty_ome_zarr",
     "create_empty_plate",
     "create_empty_well",
ngio/common/__init__.py CHANGED
@@ -2,22 +2,27 @@
 
 from ngio.common._dimensions import Dimensions
 from ngio.common._masking_roi import compute_masking_roi
-from ngio.common._pyramid import consolidate_pyramid, init_empty_pyramid, on_disk_zoom
-from ngio.common._roi import (
-    Roi,
-    RoiPixels,
+from ngio.common._pyramid import (
+    ChunksLike,
+    ImagePyramidBuilder,
+    ShardsLike,
+    consolidate_pyramid,
+    on_disk_zoom,
 )
+from ngio.common._roi import Roi, RoiSlice
 from ngio.common._zoom import InterpolationOrder, dask_zoom, numpy_zoom
 
 __all__ = [
+    "ChunksLike",
     "Dimensions",
+    "ImagePyramidBuilder",
     "InterpolationOrder",
     "Roi",
-    "RoiPixels",
+    "RoiSlice",
+    "ShardsLike",
     "compute_masking_roi",
     "consolidate_pyramid",
    "dask_zoom",
-    "init_empty_pyramid",
    "numpy_zoom",
    "on_disk_zoom",
 ]
ngio/common/_masking_roi.py CHANGED
@@ -1,13 +1,14 @@
 """Utilities to build masking regions of interest (ROIs)."""
 
 import itertools
+from collections.abc import Sequence
 
 import dask.array as da
 import numpy as np
 import scipy.ndimage as ndi
 from dask.delayed import delayed
 
-from ngio.common._roi import Roi, RoiPixels
+from ngio.common._roi import Roi
 from ngio.ome_zarr_meta import PixelSize
 from ngio.utils import NgioValueError
 
@@ -98,7 +99,14 @@ def compute_slices(segmentation: np.ndarray) -> dict[int, tuple[slice, ...]]:
 
 
 def lazy_compute_slices(segmentation: da.Array) -> dict[int, tuple[slice, ...]]:
-    """Compute slices for each label in a segmentation."""
+    """Compute slices for each label in a segmentation using lazy evaluation.
+
+    Args:
+        segmentation: The dask segmentation array.
+
+    Returns:
+        A dictionary mapping label IDs to their bounding box slices.
+    """
     global_offsets = _compute_offsets(segmentation.chunks)
     delayed_chunks = segmentation.to_delayed()  # type: ignore
 
@@ -115,18 +123,32 @@ def lazy_compute_slices(segmentation: da.Array) -> dict[int, tuple[slice, ...]]:
 
 
 def compute_masking_roi(
-    segmentation: np.ndarray | da.Array, pixel_size: PixelSize
+    segmentation: np.ndarray | da.Array,
+    pixel_size: PixelSize,
+    axes_order: Sequence[str],
 ) -> list[Roi]:
-    """Compute a ROIs for each label in a segmentation.
+    """Compute ROIs for each label in a segmentation.
+
+    This function expects a 2D, 3D, or 4D segmentation array.
+    The axes order should match the segmentation dimensions.
 
-    This function expects a 2D or 3D segmentation array.
-    And this function expects the axes order to be 'zyx' or 'yx'.
-    Other axes orders are not supported.
+    Args:
+        segmentation: The segmentation array (2D, 3D, or 4D).
+        pixel_size: The pixel size metadata for coordinate conversion.
+        axes_order: The order of axes in the segmentation (e.g., 'zyx' or 'yx').
 
+    Returns:
+        A list of Roi objects, one for each unique label in the segmentation.
     """
     if segmentation.ndim not in [2, 3, 4]:
         raise NgioValueError("Only 2D, 3D, and 4D segmentations are supported.")
 
+    if len(axes_order) != segmentation.ndim:
+        raise NgioValueError(
+            "The length of axes_order must match the number of dimensions "
+            "of the segmentation."
+        )
+
     if isinstance(segmentation, da.Array):
         slices = lazy_compute_slices(segmentation)
     else:
@@ -134,53 +156,11 @@ def compute_masking_roi(
 
     rois = []
     for label, slice_ in slices.items():
-        if len(slice_) == 2:
-            min_t, max_t = None, None
-            min_z, max_z = None, None
-            min_y, min_x = slice_[0].start, slice_[1].start
-            max_y, max_x = slice_[0].stop, slice_[1].stop
-        elif len(slice_) == 3:
-            min_t, max_t = None, None
-            min_z, min_y, min_x = slice_[0].start, slice_[1].start, slice_[2].start
-            max_z, max_y, max_x = slice_[0].stop, slice_[1].stop, slice_[2].stop
-        elif len(slice_) == 4:
-            min_t, min_z, min_y, min_x = (
-                slice_[0].start,
-                slice_[1].start,
-                slice_[2].start,
-                slice_[3].start,
-            )
-            max_t, max_z, max_y, max_x = (
-                slice_[0].stop,
-                slice_[1].stop,
-                slice_[2].stop,
-                slice_[3].stop,
-            )
-        else:
-            raise ValueError("Invalid slice length.")
-
-        if max_t is None:
-            t_length = None
-        else:
-            t_length = max_t - min_t
-
-        if max_z is None:
-            z_length = None
-        else:
-            z_length = max_z - min_z
-
-        roi = RoiPixels(
-            name=str(label),
-            x_length=max_x - min_x,
-            y_length=max_y - min_y,
-            z_length=z_length,
-            t_length=t_length,
-            x=min_x,
-            y=min_y,
-            z=min_z,
-            label=label,
+        assert len(slice_) == len(axes_order)
+        slices = dict(zip(axes_order, slice_, strict=True))
+        roi = Roi.from_values(
+            name=str(label), slices=slices, label=label, space="pixel"
         )
-
-        roi = roi.to_roi(pixel_size)
+        roi = roi.to_world(pixel_size=pixel_size)
         rois.append(roi)
     return rois
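
Note that `compute_masking_roi` now requires an explicit `axes_order` matching `segmentation.ndim`, instead of assuming 'yx'/'zyx'. A minimal usage sketch of the new signature (the `PixelSize` keyword arguments are an assumption; they are not shown in this diff):

    import numpy as np

    from ngio.common import compute_masking_roi
    from ngio.ome_zarr_meta import PixelSize

    # Toy 2D label image with two objects (labels 1 and 2).
    segmentation = np.zeros((64, 64), dtype=np.uint16)
    segmentation[4:16, 8:24] = 1
    segmentation[32:48, 40:60] = 2

    # axes_order must have one entry per array dimension; the x/y/z
    # keywords for PixelSize are assumed here.
    pixel_size = PixelSize(x=0.5, y=0.5, z=1.0)
    rois = compute_masking_roi(segmentation, pixel_size, axes_order="yx")
    # One Roi per label, built in pixel space and converted to world
    # coordinates via to_world(pixel_size=...).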
ngio/common/_pyramid.py CHANGED
@@ -1,12 +1,12 @@
+import itertools
 import math
-from collections.abc import Callable, Sequence
-from typing import Literal
+from collections.abc import Callable, Mapping, Sequence
+from typing import Any, Literal
 
-import dask
 import dask.array as da
 import numpy as np
 import zarr
-from zarr.types import DIMENSION_SEPARATOR
+from pydantic import BaseModel, ConfigDict, model_validator
 
 from ngio.common._zoom import (
     InterpolationOrder,
@@ -15,10 +15,7 @@ from ngio.common._zoom import (
     numpy_zoom,
 )
 from ngio.utils import (
-    AccessModeLiteral,
     NgioValueError,
-    StoreOrGroup,
-    open_group_wrapper,
 )
 
 
@@ -27,7 +24,10 @@ def _on_disk_numpy_zoom(
     target: zarr.Array,
     order: InterpolationOrder,
 ) -> None:
-    target[...] = numpy_zoom(source[...], target_shape=target.shape, order=order)
+    source_array = source[...]
+    if not isinstance(source_array, np.ndarray):
+        raise NgioValueError("source zarr array could not be read as a numpy array")
+    target[...] = numpy_zoom(source_array, target_shape=target.shape, order=order)
 
 
 def _on_disk_dask_zoom(
@@ -37,18 +37,20 @@
 ) -> None:
     source_array = da.from_zarr(source)
     target_array = dask_zoom(source_array, target_shape=target.shape, order=order)
+
     # This is a potential fix for Dask 2025.11
+    # import dask.config
     # chunk_size_bytes = np.prod(target.chunks) * target_array.dtype.itemsize
-    # current_chunk_size = dask.config.get("array.chunk-size", 0)
-    #
-    # if current_chunk_size < chunk_size_bytes:
-    #     # Increase the chunk size to avoid dask potentially creating
-    #     # corrupted chunks when writing chunks that are not multiple of the
-    #     # target chunk size
-    #     dask.config.set({"array.chunk-size": f"{chunk_size_bytes}B"})
+    # current_chunk_size = dask.config.get("array.chunk-size")
+    # Increase the chunk size to avoid dask potentially creating
+    # corrupted chunks when writing chunks that are not multiple of the
+    # target chunk size
+    # dask.config.set({"array.chunk-size": f"{chunk_size_bytes}B"})
     target_array = target_array.rechunk(target.chunks)
     target_array = target_array.compute_chunk_sizes()
     target_array.to_zarr(target)
+    # Restore previous chunk size
+    # dask.config.set({"array.chunk-size": current_chunk_size})
 
 
 def _on_disk_coarsen(
@@ -92,16 +94,7 @@
 
     coarsening_setup = {}
     for i, s in enumerate(_scale):
-        factor = 1 / s
-        # This check is very strict, but it is necessary to avoid
-        # a few pixels shift in the coarsening
-        # We could add a tolerance
-        if factor.is_integer():
-            coarsening_setup[i] = int(factor)
-        else:
-            raise NgioValueError(
-                f"Coarsening factor must be an integer, got {factor} on axis {i}"
-            )
+        coarsening_setup[i] = int(np.round(1 / s))
 
     out_target = da.coarsen(
         aggregation_function, source_array, coarsening_setup, trim_excess=True
@@ -194,67 +187,321 @@ def consolidate_pyramid(
         processed.append(target_image)
 
 
-def _maybe_int(value: float | int) -> float | int:
-    """Convert a float to an int if it is an integer."""
-    if isinstance(value, int):
-        return value
-    if value.is_integer():
-        return int(value)
-    return value
-
-
-def init_empty_pyramid(
-    store: StoreOrGroup,
-    paths: list[str],
-    ref_shape: Sequence[int],
-    scaling_factors: Sequence[float],
-    chunks: Sequence[int] | None = None,
-    dtype: str = "uint16",
-    mode: AccessModeLiteral = "a",
-    dimension_separator: DIMENSION_SEPARATOR = "/",
-    compressor="default",
-) -> None:
-    # Return the an Image object
-    if chunks is not None and len(chunks) != len(ref_shape):
-        raise NgioValueError(
-            "The shape and chunks must have the same number of dimensions."
-        )
-
-    if chunks is not None:
-        chunks = [min(c, s) for c, s in zip(chunks, ref_shape, strict=True)]
-
-    if len(ref_shape) != len(scaling_factors):
-        raise NgioValueError(
-            "The shape and scaling factor must have the same number of dimensions."
-        )
-
-    # Ensure scaling factors are int if possible
-    # To reduce the risk of floating point issues
-    scaling_factors = [_maybe_int(s) for s in scaling_factors]
-
-    root_group = open_group_wrapper(store, mode=mode)
-
-    for path in paths:
-        if any(s < 1 for s in ref_shape):
-            raise NgioValueError(
-                "Level shape must be at least 1 on all dimensions. "
-                f"Calculated shape: {ref_shape} at level {path}."
-            )
-        new_arr = root_group.zeros(
-            name=path,
-            shape=ref_shape,
-            dtype=dtype,
-            chunks=chunks,
-            dimension_separator=dimension_separator,
-            overwrite=True,
-            compressor=compressor,
-        )
-
-        ref_shape = [
-            math.floor(s / sc) for s, sc in zip(ref_shape, scaling_factors, strict=True)
-        ]
-        chunks = tuple(
-            min(c, s) for c, s in zip(new_arr.chunks, ref_shape, strict=True)
-        )
-
-    return None
+################################################
+#
+# Builders for image pyramids
+#
+################################################
+
+ChunksLike = tuple[int, ...] | Literal["auto"]
+ShardsLike = tuple[int, ...] | Literal["auto"]
+
+
+def compute_shapes_from_scaling_factors(
+    base_shape: tuple[int, ...],
+    scaling_factors: tuple[float, ...],
+    num_levels: int,
+) -> list[tuple[int, ...]]:
+    """Compute the shapes of each level in the pyramid from scaling factors.
+
+    Args:
+        base_shape (tuple[int, ...]): The shape of the base level.
+        scaling_factors (tuple[float, ...]): The scaling factors between levels.
+        num_levels (int): The number of levels in the pyramid.
+
+    Returns:
+        list[tuple[int, ...]]: The shapes of each level in the pyramid.
+    """
+    shapes = []
+    current_shape = base_shape
+    for _ in range(num_levels):
+        shapes.append(current_shape)
+        current_shape = tuple(
+            max(1, math.floor(s / f))
+            for s, f in zip(current_shape, scaling_factors, strict=True)
+        )
+    return shapes
+
+
+def _check_order(shapes: Sequence[tuple[int, ...]]):
+    """Check if the shapes are in decreasing order."""
+    num_pixels = [np.prod(shape) for shape in shapes]
+    for i in range(1, len(num_pixels)):
+        if num_pixels[i] >= num_pixels[i - 1]:
+            raise NgioValueError("Shapes are not in decreasing order.")
+
+
+class PyramidLevel(BaseModel):
+    path: str
+    shape: tuple[int, ...]
+    scale: tuple[float, ...]
+    translation: tuple[float, ...]
+    chunks: ChunksLike = "auto"
+    shards: ShardsLike | None = None
+
+    @model_validator(mode="after")
+    def _model_validation(self) -> "PyramidLevel":
+        # Same length as shape
+        if len(self.scale) != len(self.shape):
+            raise NgioValueError(
+                "Scale must have the same length as shape "
+                f"({len(self.shape)}), got {len(self.scale)}"
+            )
+        if any(isinstance(s, float) and s < 0 for s in self.scale):
+            raise NgioValueError("Scale values must be positive.")
+
+        if len(self.translation) != len(self.shape):
+            raise NgioValueError(
+                "Translation must have the same length as shape "
+                f"({len(self.shape)}), got {len(self.translation)}"
+            )
+
+        if isinstance(self.chunks, tuple):
+            if len(self.chunks) != len(self.shape):
+                raise NgioValueError(
+                    "Chunks must have the same length as shape "
+                    f"({len(self.shape)}), got {len(self.chunks)}"
+                )
+            normalized_chunks = []
+            for dim_size, chunk_size in zip(self.shape, self.chunks, strict=True):
+                normalized_chunks.append(min(dim_size, chunk_size))
+            self.chunks = tuple(normalized_chunks)
+
+        if isinstance(self.shards, tuple):
+            if len(self.shards) != len(self.shape):
+                raise NgioValueError(
+                    "Shards must have the same length as shape "
+                    f"({len(self.shape)}), got {len(self.shards)}"
+                )
+            normalized_shards = []
+            for dim_size, shard_size in zip(self.shape, self.shards, strict=True):
+                normalized_shards.append(min(dim_size, shard_size))
+            self.shards = tuple(normalized_shards)
+        return self
+
+
+def compute_scales_from_shapes(
+    shapes: Sequence[tuple[int, ...]],
+    base_scale: tuple[float, ...],
+) -> list[tuple[float, ...]]:
+    scales = [base_scale]
+    scale_ = base_scale
+    for current_shape, next_shape in itertools.pairwise(shapes):
+        # This only works for downsampling pyramids
+        # The _check_order function (called before) ensures that the
+        # shapes are decreasing
+        _scaling_factor = tuple(
+            s1 / s2
+            for s1, s2 in zip(
+                current_shape,
+                next_shape,
+                strict=True,
+            )
+        )
+        scale_ = tuple(s * f for s, f in zip(scale_, _scaling_factor, strict=True))
+        scales.append(scale_)
+    return scales
+
+
+def _compute_translations_from_shapes(
+    scales: Sequence[tuple[float, ...]],
+    base_translation: Sequence[float] | None,
+) -> list[tuple[float, ...]]:
+    translations = []
+    if base_translation is None:
+        n_dim = len(scales[0])
+        base_translation = tuple(0.0 for _ in range(n_dim))
+    else:
+        base_translation = tuple(base_translation)
+
+    translation_ = base_translation
+    for _ in scales:
+        # TBD: How to update translation
+        # For now, we keep it constant but we should probably change it
+        # to reflect the shift introduced by downsampling
+        # translation_ = translation_ + _scaling_factor
+        translations.append(translation_)
+    return translations
+
+
+def _compute_scales_from_factors(
+    base_scale: tuple[float, ...], scaling_factors: tuple[float, ...], num_levels: int
+) -> list[tuple[float, ...]]:
+    precision_scales = []
+    current_scale = base_scale
+    for _ in range(num_levels):
+        precision_scales.append(current_scale)
+        current_scale = tuple(
+            s * f for s, f in zip(current_scale, scaling_factors, strict=True)
+        )
+    return precision_scales
+
+
+class ImagePyramidBuilder(BaseModel):
+    levels: list[PyramidLevel]
+    axes: tuple[str, ...]
+    data_type: str = "uint16"
+    dimension_separator: Literal[".", "/"] = "/"
+    compressors: Any = "auto"
+    zarr_format: Literal[2, 3] = 2
+    other_array_kwargs: Mapping[str, Any] = {}
+
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    @classmethod
+    def from_scaling_factors(
+        cls,
+        levels_paths: tuple[str, ...],
+        scaling_factors: tuple[float, ...],
+        base_shape: tuple[int, ...],
+        base_scale: tuple[float, ...],
+        axes: tuple[str, ...],
+        base_translation: Sequence[float] | None = None,
+        chunks: ChunksLike = "auto",
+        shards: ShardsLike | None = None,
+        data_type: str = "uint16",
+        dimension_separator: Literal[".", "/"] = "/",
+        compressors: Any = "auto",
+        zarr_format: Literal[2, 3] = 2,
+        other_array_kwargs: Mapping[str, Any] | None = None,
+        precision_scale: bool = True,
+    ) -> "ImagePyramidBuilder":
+        # Since shapes needs to be rounded to integers, we compute them here
+        # and then pass them to from_shapes
+        # This ensures that the shapes and scaling factors are consistent
+        # and avoids accumulation of rounding errors
+        shapes = compute_shapes_from_scaling_factors(
+            base_shape=base_shape,
+            scaling_factors=scaling_factors,
+            num_levels=len(levels_paths),
+        )
+
+        if precision_scale:
+            # Compute precise scales from shapes
+            # Since shapes are rounded to integers, the scaling factors
+            # may not be exactly the same as the input scaling factors
+            # Thus, we compute the scales from the shapes to ensure consistency
+            base_scale_ = compute_scales_from_shapes(
+                shapes=shapes,
+                base_scale=base_scale,
+            )
+        else:
+            base_scale_ = _compute_scales_from_factors(
+                base_scale=base_scale,
+                scaling_factors=scaling_factors,
+                num_levels=len(levels_paths),
+            )
+
+        return cls.from_shapes(
+            shapes=shapes,
+            base_scale=base_scale_,
+            axes=axes,
+            base_translation=base_translation,
+            levels_paths=levels_paths,
+            chunks=chunks,
+            shards=shards,
+            data_type=data_type,
+            dimension_separator=dimension_separator,
+            compressors=compressors,
+            zarr_format=zarr_format,
+            other_array_kwargs=other_array_kwargs,
+        )
+
+    @classmethod
+    def from_shapes(
+        cls,
+        shapes: Sequence[tuple[int, ...]],
+        base_scale: tuple[float, ...] | list[tuple[float, ...]],
+        axes: tuple[str, ...],
+        base_translation: Sequence[float] | None = None,
+        levels_paths: Sequence[str] | None = None,
+        chunks: ChunksLike = "auto",
+        shards: ShardsLike | None = None,
+        data_type: str = "uint16",
+        dimension_separator: Literal[".", "/"] = "/",
+        compressors: Any = "auto",
+        zarr_format: Literal[2, 3] = 2,
+        other_array_kwargs: Mapping[str, Any] | None = None,
+    ) -> "ImagePyramidBuilder":
+        levels = []
+        if levels_paths is None:
+            levels_paths = tuple(str(i) for i in range(len(shapes)))
+
+        _check_order(shapes)
+        if isinstance(base_scale, tuple) and all(
+            isinstance(s, float) for s in base_scale
+        ):
+            scales = compute_scales_from_shapes(shapes, base_scale)
+        elif isinstance(base_scale, list):
+            scales = base_scale
+            if len(scales) != len(shapes):
+                raise NgioValueError(
+                    "Scales must have the same length as shapes "
+                    f"({len(shapes)}), got {len(scales)}"
+                )
+        else:
+            raise NgioValueError(
+                "base_scale must be either a tuple of floats or a list of tuples "
+                "of floats."
+            )
+
+        translations = _compute_translations_from_shapes(scales, base_translation)
+        for level_path, shape, scale, translation in zip(
+            levels_paths,
+            shapes,
+            scales,
+            translations,
+            strict=True,
+        ):
+            level = PyramidLevel(
+                path=level_path,
+                shape=shape,
+                scale=scale,
+                translation=translation,
+                chunks=chunks,
+                shards=shards,
+            )
+            levels.append(level)
+        other_array_kwargs = other_array_kwargs or {}
+        return cls(
+            levels=levels,
+            axes=axes,
+            data_type=data_type,
+            dimension_separator=dimension_separator,
+            compressors=compressors,
+            zarr_format=zarr_format,
+            other_array_kwargs=other_array_kwargs,
+        )
+
+    def to_zarr(self, group: zarr.Group) -> None:
+        """Save the pyramid specification to a Zarr group.
+
+        Args:
+            group (zarr.Group): The Zarr group to save the pyramid specification to.
+        """
+        array_static_kwargs = {
+            "dtype": self.data_type,
+            "overwrite": True,
+            "compressors": self.compressors,
+            **self.other_array_kwargs,
+        }
+
+        if self.zarr_format == 2:
+            array_static_kwargs["chunk_key_encoding"] = {
+                "name": "v2",
+                "separator": self.dimension_separator,
+            }
+        else:
+            array_static_kwargs["chunk_key_encoding"] = {
+                "name": "default",
+                "separator": self.dimension_separator,
+            }
+            array_static_kwargs["dimension_names"] = self.axes
+        for p_level in self.levels:
+            group.create_array(
+                name=p_level.path,
+                shape=tuple(p_level.shape),
+                chunks=p_level.chunks,
+                shards=p_level.shards,
+                **array_static_kwargs,
+            )
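
Taken together, `ImagePyramidBuilder` replaces `init_empty_pyramid` as the way to allocate an empty multiscale image. A minimal sketch based only on the signatures shown above (the on-disk path is hypothetical; dtype "uint16" and Zarr format 2 are the builder defaults):

    import zarr

    from ngio.common import ImagePyramidBuilder

    # Three-level 2D pyramid, halving y and x at each level:
    # shapes (1024, 1024) -> (512, 512) -> (256, 256).
    builder = ImagePyramidBuilder.from_scaling_factors(
        levels_paths=("0", "1", "2"),
        scaling_factors=(2.0, 2.0),
        base_shape=(1024, 1024),
        base_scale=(0.5, 0.5),
        axes=("y", "x"),
        chunks=(256, 256),
    )

    # The group's Zarr format should match builder.zarr_format (2 by default).
    group = zarr.open_group("pyramid.zarr", mode="w", zarr_format=2)
    builder.to_zarr(group)  # creates empty arrays "0", "1", "2"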