ngio 0.5.0__py3-none-any.whl → 0.5.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. ngio/__init__.py +2 -5
  2. ngio/common/__init__.py +6 -11
  3. ngio/common/_masking_roi.py +54 -34
  4. ngio/common/_pyramid.py +87 -321
  5. ngio/common/_roi.py +330 -258
  6. ngio/experimental/iterators/_feature.py +3 -3
  7. ngio/experimental/iterators/_rois_utils.py +11 -10
  8. ngio/hcs/_plate.py +136 -192
  9. ngio/images/_abstract_image.py +35 -539
  10. ngio/images/_create.py +283 -0
  11. ngio/images/_create_synt_container.py +43 -40
  12. ngio/images/_image.py +251 -517
  13. ngio/images/_label.py +172 -249
  14. ngio/images/_masked_image.py +2 -2
  15. ngio/images/_ome_zarr_container.py +241 -644
  16. ngio/io_pipes/_io_pipes.py +9 -9
  17. ngio/io_pipes/_io_pipes_masked.py +7 -7
  18. ngio/io_pipes/_io_pipes_roi.py +6 -6
  19. ngio/io_pipes/_io_pipes_types.py +3 -3
  20. ngio/io_pipes/_match_shape.py +8 -6
  21. ngio/io_pipes/_ops_slices_utils.py +5 -8
  22. ngio/ome_zarr_meta/__init__.py +18 -29
  23. ngio/ome_zarr_meta/_meta_handlers.py +708 -392
  24. ngio/ome_zarr_meta/ngio_specs/__init__.py +0 -4
  25. ngio/ome_zarr_meta/ngio_specs/_axes.py +51 -152
  26. ngio/ome_zarr_meta/ngio_specs/_dataset.py +22 -13
  27. ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py +91 -129
  28. ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +68 -57
  29. ngio/ome_zarr_meta/v04/__init__.py +1 -5
  30. ngio/ome_zarr_meta/v04/{_v04_spec.py → _v04_spec_utils.py} +85 -54
  31. ngio/ome_zarr_meta/v05/__init__.py +1 -5
  32. ngio/ome_zarr_meta/v05/{_v05_spec.py → _v05_spec_utils.py} +87 -64
  33. ngio/resources/__init__.py +1 -1
  34. ngio/resources/resource_model.py +1 -1
  35. ngio/tables/_tables_container.py +27 -85
  36. ngio/tables/backends/_anndata.py +8 -58
  37. ngio/tables/backends/_anndata_utils.py +6 -1
  38. ngio/tables/backends/_csv.py +19 -3
  39. ngio/tables/backends/_json.py +13 -10
  40. ngio/tables/backends/_non_zarr_backends.py +196 -0
  41. ngio/tables/backends/_parquet.py +31 -3
  42. ngio/tables/v1/_roi_table.py +27 -44
  43. ngio/utils/__init__.py +12 -8
  44. ngio/utils/_datasets.py +0 -6
  45. ngio/utils/_logger.py +50 -0
  46. ngio/utils/_zarr_utils.py +250 -292
  47. {ngio-0.5.0.dist-info → ngio-0.5.0a1.dist-info}/METADATA +6 -13
  48. ngio-0.5.0a1.dist-info/RECORD +88 -0
  49. {ngio-0.5.0.dist-info → ngio-0.5.0a1.dist-info}/WHEEL +1 -1
  50. ngio/images/_create_utils.py +0 -406
  51. ngio/tables/backends/_py_arrow_backends.py +0 -222
  52. ngio/utils/_cache.py +0 -48
  53. ngio-0.5.0.dist-info/RECORD +0 -88
  54. {ngio-0.5.0.dist-info → ngio-0.5.0a1.dist-info}/licenses/LICENSE +0 -0
ngio/__init__.py CHANGED
@@ -9,7 +9,7 @@ except PackageNotFoundError: # pragma: no cover
 __author__ = "Lorenzo Cerrone"
 __email__ = "lorenzo.cerrone@uzh.ch"
 
-from ngio.common import Dimensions, Roi, RoiSlice
+from ngio.common import Dimensions, Roi, RoiPixels
 from ngio.hcs import (
     OmeZarrPlate,
     OmeZarrWell,
@@ -37,7 +37,6 @@ from ngio.ome_zarr_meta.ngio_specs import (
     NgffVersions,
     PixelSize,
 )
-from ngio.utils import NgioSupportedStore, StoreOrGroup
 
 __all__ = [
     "AxesSetup",
@@ -48,14 +47,12 @@ __all__ = [
     "ImageInWellPath",
     "Label",
     "NgffVersions",
-    "NgioSupportedStore",
     "OmeZarrContainer",
     "OmeZarrPlate",
     "OmeZarrWell",
     "PixelSize",
     "Roi",
-    "RoiSlice",
-    "StoreOrGroup",
+    "RoiPixels",
     "create_empty_ome_zarr",
     "create_empty_plate",
     "create_empty_well",
ngio/common/__init__.py CHANGED
@@ -2,27 +2,22 @@
 
 from ngio.common._dimensions import Dimensions
 from ngio.common._masking_roi import compute_masking_roi
-from ngio.common._pyramid import (
-    ChunksLike,
-    ImagePyramidBuilder,
-    ShardsLike,
-    consolidate_pyramid,
-    on_disk_zoom,
+from ngio.common._pyramid import consolidate_pyramid, init_empty_pyramid, on_disk_zoom
+from ngio.common._roi import (
+    Roi,
+    RoiPixels,
 )
-from ngio.common._roi import Roi, RoiSlice
 from ngio.common._zoom import InterpolationOrder, dask_zoom, numpy_zoom
 
 __all__ = [
-    "ChunksLike",
     "Dimensions",
-    "ImagePyramidBuilder",
     "InterpolationOrder",
     "Roi",
-    "RoiSlice",
-    "ShardsLike",
+    "RoiPixels",
     "compute_masking_roi",
     "consolidate_pyramid",
     "dask_zoom",
+    "init_empty_pyramid",
    "numpy_zoom",
    "on_disk_zoom",
 ]
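The public surface of `ngio.common` swaps the declarative pyramid API (`ImagePyramidBuilder`, `ChunksLike`, `ShardsLike`) for the single `init_empty_pyramid` function, and `RoiSlice` for `RoiPixels`. A hedged before/after sketch, using only names from the two `__all__` lists above:

    # Valid against 0.5.0:
    from ngio.common import ChunksLike, ImagePyramidBuilder, RoiSlice, ShardsLike
    # Valid against 0.5.0a1:
    from ngio.common import RoiPixels, init_empty_pyramid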
ngio/common/_masking_roi.py CHANGED
@@ -1,14 +1,13 @@
 """Utilities to build masking regions of interest (ROIs)."""
 
 import itertools
-from collections.abc import Sequence
 
 import dask.array as da
 import numpy as np
 import scipy.ndimage as ndi
 from dask.delayed import delayed
 
-from ngio.common._roi import Roi
+from ngio.common._roi import Roi, RoiPixels
 from ngio.ome_zarr_meta import PixelSize
 from ngio.utils import NgioValueError
 
@@ -99,14 +98,7 @@ def compute_slices(segmentation: np.ndarray) -> dict[int, tuple[slice, ...]]:
 
 
 def lazy_compute_slices(segmentation: da.Array) -> dict[int, tuple[slice, ...]]:
-    """Compute slices for each label in a segmentation using lazy evaluation.
-
-    Args:
-        segmentation: The dask segmentation array.
-
-    Returns:
-        A dictionary mapping label IDs to their bounding box slices.
-    """
+    """Compute slices for each label in a segmentation."""
     global_offsets = _compute_offsets(segmentation.chunks)
     delayed_chunks = segmentation.to_delayed()  # type: ignore
 
@@ -123,32 +115,18 @@ def lazy_compute_slices(segmentation: da.Array) -> dict[int, tuple[slice, ...]]:
 
 
 def compute_masking_roi(
-    segmentation: np.ndarray | da.Array,
-    pixel_size: PixelSize,
-    axes_order: Sequence[str],
+    segmentation: np.ndarray | da.Array, pixel_size: PixelSize
 ) -> list[Roi]:
-    """Compute ROIs for each label in a segmentation.
-
-    This function expects a 2D, 3D, or 4D segmentation array.
-    The axes order should match the segmentation dimensions.
+    """Compute a ROIs for each label in a segmentation.
 
-    Args:
-        segmentation: The segmentation array (2D, 3D, or 4D).
-        pixel_size: The pixel size metadata for coordinate conversion.
-        axes_order: The order of axes in the segmentation (e.g., 'zyx' or 'yx').
+    This function expects a 2D or 3D segmentation array.
+    And this function expects the axes order to be 'zyx' or 'yx'.
+    Other axes orders are not supported.
 
-    Returns:
-        A list of Roi objects, one for each unique label in the segmentation.
     """
     if segmentation.ndim not in [2, 3, 4]:
         raise NgioValueError("Only 2D, 3D, and 4D segmentations are supported.")
 
-    if len(axes_order) != segmentation.ndim:
-        raise NgioValueError(
-            "The length of axes_order must match the number of dimensions "
-            "of the segmentation."
-        )
-
     if isinstance(segmentation, da.Array):
         slices = lazy_compute_slices(segmentation)
     else:
@@ -156,11 +134,53 @@ def compute_masking_roi(
 
     rois = []
     for label, slice_ in slices.items():
-        assert len(slice_) == len(axes_order)
-        slices = dict(zip(axes_order, slice_, strict=True))
-        roi = Roi.from_values(
-            name=str(label), slices=slices, label=label, space="pixel"
+        if len(slice_) == 2:
+            min_t, max_t = None, None
+            min_z, max_z = None, None
+            min_y, min_x = slice_[0].start, slice_[1].start
+            max_y, max_x = slice_[0].stop, slice_[1].stop
+        elif len(slice_) == 3:
+            min_t, max_t = None, None
+            min_z, min_y, min_x = slice_[0].start, slice_[1].start, slice_[2].start
+            max_z, max_y, max_x = slice_[0].stop, slice_[1].stop, slice_[2].stop
+        elif len(slice_) == 4:
+            min_t, min_z, min_y, min_x = (
+                slice_[0].start,
+                slice_[1].start,
+                slice_[2].start,
+                slice_[3].start,
+            )
+            max_t, max_z, max_y, max_x = (
+                slice_[0].stop,
+                slice_[1].stop,
+                slice_[2].stop,
+                slice_[3].stop,
+            )
+        else:
+            raise ValueError("Invalid slice length.")
+
+        if max_t is None:
+            t_length = None
+        else:
+            t_length = max_t - min_t
+
+        if max_z is None:
+            z_length = None
+        else:
+            z_length = max_z - min_z
+
+        roi = RoiPixels(
+            name=str(label),
+            x_length=max_x - min_x,
+            y_length=max_y - min_y,
+            z_length=z_length,
+            t_length=t_length,
+            x=min_x,
+            y=min_y,
+            z=min_z,
+            label=label,
         )
-        roi = roi.to_world(pixel_size=pixel_size)
+
+        roi = roi.to_roi(pixel_size)
         rois.append(roi)
     return rois
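With the `axes_order` parameter removed, `compute_masking_roi` now infers the layout from array rank alone and assumes 'yx'/'zyx'-style ordering; each label's bounding box is built as a `RoiPixels` and converted to world coordinates with `to_roi(pixel_size)`. A usage sketch against the 0.5.0a1 signature above (the `PixelSize` keyword arguments are an assumption, not shown in this diff):

    import numpy as np

    from ngio import PixelSize
    from ngio.common import compute_masking_roi

    seg = np.zeros((2, 64, 64), dtype=np.uint16)  # interpreted as zyx
    seg[:, 10:20, 30:50] = 1                      # one labeled object
    # Returns one world-space Roi per label in the segmentation
    rois = compute_masking_roi(seg, PixelSize(x=0.5, y=0.5, z=1.0))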
ngio/common/_pyramid.py CHANGED
@@ -1,13 +1,13 @@
-import itertools
 import math
-from collections.abc import Callable, Mapping, Sequence
-from typing import Any, Literal
+from collections.abc import Callable, Sequence
+from typing import Literal
 
 import dask.array as da
 import numpy as np
 import zarr
-from pydantic import BaseModel, ConfigDict, model_validator
+from zarr.core.array import CompressorLike
 
+# from zarr.types import DIMENSION_SEPARATOR
 from ngio.common._zoom import (
     InterpolationOrder,
     _zoom_inputs_check,
@@ -15,7 +15,10 @@ from ngio.common._zoom import (
     numpy_zoom,
 )
 from ngio.utils import (
+    AccessModeLiteral,
     NgioValueError,
+    StoreOrGroup,
+    open_group_wrapper,
 )
 
 
@@ -38,19 +41,9 @@ def _on_disk_dask_zoom(
     source_array = da.from_zarr(source)
     target_array = dask_zoom(source_array, target_shape=target.shape, order=order)
 
-    # This is a potential fix for Dask 2025.11
-    # import dask.config
-    # chunk_size_bytes = np.prod(target.chunks) * target_array.dtype.itemsize
-    # current_chunk_size = dask.config.get("array.chunk-size")
-    # Increase the chunk size to avoid dask potentially creating
-    # corrupted chunks when writing chunks that are not multiple of the
-    # target chunk size
-    # dask.config.set({"array.chunk-size": f"{chunk_size_bytes}B"})
-    target_array = target_array.rechunk(target.chunks)
-    target_array = target_array.compute_chunk_sizes()
+    target_array = target_array.rechunk(target.chunks)  # type: ignore
+    target_array.compute_chunk_sizes()
     target_array.to_zarr(target)
-    # Restore previous chunk size
-    # dask.config.set({"array.chunk-size": current_chunk_size})
 
 
 def _on_disk_coarsen(
@@ -94,7 +87,16 @@ def _on_disk_coarsen(
 
     coarsening_setup = {}
     for i, s in enumerate(_scale):
-        coarsening_setup[i] = int(np.round(1 / s))
+        factor = 1 / s
+        # This check is very strict, but it is necessary to avoid
+        # a few pixels shift in the coarsening
+        # We could add a tolerance
+        if factor.is_integer():
+            coarsening_setup[i] = int(factor)
+        else:
+            raise NgioValueError(
+                f"Coarsening factor must be an integer, got {factor} on axis {i}"
+            )
 
     out_target = da.coarsen(
         aggregation_function, source_array, coarsening_setup, trim_excess=True
@@ -187,321 +189,85 @@ def consolidate_pyramid(
         processed.append(target_image)
 
 
-################################################
-#
-# Builders for image pyramids
-#
-################################################
-
-ChunksLike = tuple[int, ...] | Literal["auto"]
-ShardsLike = tuple[int, ...] | Literal["auto"]
-
-
-def compute_shapes_from_scaling_factors(
-    base_shape: tuple[int, ...],
-    scaling_factors: tuple[float, ...],
-    num_levels: int,
-) -> list[tuple[int, ...]]:
-    """Compute the shapes of each level in the pyramid from scaling factors.
-
-    Args:
-        base_shape (tuple[int, ...]): The shape of the base level.
-        scaling_factors (tuple[float, ...]): The scaling factors between levels.
-        num_levels (int): The number of levels in the pyramid.
-
-    Returns:
-        list[tuple[int, ...]]: The shapes of each level in the pyramid.
-    """
-    shapes = []
-    current_shape = base_shape
-    for _ in range(num_levels):
-        shapes.append(current_shape)
-        current_shape = tuple(
-            max(1, math.floor(s / f))
-            for s, f in zip(current_shape, scaling_factors, strict=True)
+def _maybe_int(value: float | int) -> float | int:
+    """Convert a float to an int if it is an integer."""
+    if isinstance(value, int):
+        return value
+    if value.is_integer():
+        return int(value)
+    return value
+
+
+def init_empty_pyramid(
+    store: StoreOrGroup,
+    paths: list[str],
+    ref_shape: Sequence[int],
+    scaling_factors: Sequence[float],
+    axes: Sequence[str],
+    chunks: Sequence[int] | Literal["auto"] = "auto",
+    dtype: str = "uint16",
+    mode: AccessModeLiteral = "a",
+    dimension_separator: Literal[".", "/"] = "/",
+    compressors: CompressorLike = "auto",
+    zarr_format: Literal[2, 3] = 2,
+) -> None:
+    # Return the an Image object
+    if chunks != "auto" and len(chunks) != len(ref_shape):
+        raise NgioValueError(
+            "The shape and chunks must have the same number of dimensions."
         )
-    return shapes
 
+    if chunks != "auto":
+        chunks = tuple(min(c, s) for c, s in zip(chunks, ref_shape, strict=True))
+    else:
+        chunks = "auto"
 
-def _check_order(shapes: Sequence[tuple[int, ...]]):
-    """Check if the shapes are in decreasing order."""
-    num_pixels = [np.prod(shape) for shape in shapes]
-    for i in range(1, len(num_pixels)):
-        if num_pixels[i] >= num_pixels[i - 1]:
-            raise NgioValueError("Shapes are not in decreasing order.")
-
+    if len(ref_shape) != len(scaling_factors):
+        raise NgioValueError(
+            "The shape and scaling factor must have the same number of dimensions."
+        )
 
-class PyramidLevel(BaseModel):
-    path: str
-    shape: tuple[int, ...]
-    scale: tuple[float, ...]
-    translation: tuple[float, ...]
-    chunks: ChunksLike = "auto"
-    shards: ShardsLike | None = None
+    # Ensure scaling factors are int if possible
+    # To reduce the risk of floating point issues
+    scaling_factors = [_maybe_int(s) for s in scaling_factors]
 
-    @model_validator(mode="after")
-    def _model_validation(self) -> "PyramidLevel":
-        # Same length as shape
-        if len(self.scale) != len(self.shape):
-            raise NgioValueError(
-                "Scale must have the same length as shape "
-                f"({len(self.shape)}), got {len(self.scale)}"
-            )
-        if any(isinstance(s, float) and s < 0 for s in self.scale):
-            raise NgioValueError("Scale values must be positive.")
+    root_group = open_group_wrapper(store, mode=mode, zarr_format=zarr_format)
 
-        if len(self.translation) != len(self.shape):
-            raise NgioValueError(
-                "Translation must have the same length as shape "
-                f"({len(self.shape)}), got {len(self.translation)}"
-            )
+    array_static_kwargs = {
+        "dtype": dtype,
+        "overwrite": True,
+        "compressors": compressors,
+    }
 
-        if isinstance(self.chunks, tuple):
-            if len(self.chunks) != len(self.shape):
-                raise NgioValueError(
-                    "Chunks must have the same length as shape "
-                    f"({len(self.shape)}), got {len(self.chunks)}"
-                )
-            normalized_chunks = []
-            for dim_size, chunk_size in zip(self.shape, self.chunks, strict=True):
-                normalized_chunks.append(min(dim_size, chunk_size))
-            self.chunks = tuple(normalized_chunks)
-
-        if isinstance(self.shards, tuple):
-            if len(self.shards) != len(self.shape):
-                raise NgioValueError(
-                    "Shards must have the same length as shape "
-                    f"({len(self.shape)}), got {len(self.shards)}"
-                )
-            normalized_shards = []
-            for dim_size, shard_size in zip(self.shape, self.shards, strict=True):
-                normalized_shards.append(min(dim_size, shard_size))
-            self.shards = tuple(normalized_shards)
-        return self
-
-
-def compute_scales_from_shapes(
-    shapes: Sequence[tuple[int, ...]],
-    base_scale: tuple[float, ...],
-) -> list[tuple[float, ...]]:
-    scales = [base_scale]
-    scale_ = base_scale
-    for current_shape, next_shape in itertools.pairwise(shapes):
-        # This only works for downsampling pyramids
-        # The _check_order function (called before) ensures that the
-        # shapes are decreasing
-        _scaling_factor = tuple(
-            s1 / s2
-            for s1, s2 in zip(
-                current_shape,
-                next_shape,
-                strict=True,
-            )
-        )
-        scale_ = tuple(s * f for s, f in zip(scale_, _scaling_factor, strict=True))
-        scales.append(scale_)
-    return scales
-
-
-def _compute_translations_from_shapes(
-    scales: Sequence[tuple[float, ...]],
-    base_translation: Sequence[float] | None,
-) -> list[tuple[float, ...]]:
-    translations = []
-    if base_translation is None:
-        n_dim = len(scales[0])
-        base_translation = tuple(0.0 for _ in range(n_dim))
+    if zarr_format == 2:
+        array_static_kwargs["chunk_key_encoding"] = {
+            "name": "v2",
+            "separator": dimension_separator,
+        }
     else:
-        base_translation = tuple(base_translation)
-
-    translation_ = base_translation
-    for _ in scales:
-        # TBD: How to update translation
-        # For now, we keep it constant but we should probably change it
-        # to reflect the shift introduced by downsampling
-        # translation_ = translation_ + _scaling_factor
-        translations.append(translation_)
-    return translations
-
-
-def _compute_scales_from_factors(
-    base_scale: tuple[float, ...], scaling_factors: tuple[float, ...], num_levels: int
-) -> list[tuple[float, ...]]:
-    precision_scales = []
-    current_scale = base_scale
-    for _ in range(num_levels):
-        precision_scales.append(current_scale)
-        current_scale = tuple(
-            s * f for s, f in zip(current_scale, scaling_factors, strict=True)
-        )
-    return precision_scales
-
-
-class ImagePyramidBuilder(BaseModel):
-    levels: list[PyramidLevel]
-    axes: tuple[str, ...]
-    data_type: str = "uint16"
-    dimension_separator: Literal[".", "/"] = "/"
-    compressors: Any = "auto"
-    zarr_format: Literal[2, 3] = 2
-    other_array_kwargs: Mapping[str, Any] = {}
-
-    model_config = ConfigDict(arbitrary_types_allowed=True)
-
-    @classmethod
-    def from_scaling_factors(
-        cls,
-        levels_paths: tuple[str, ...],
-        scaling_factors: tuple[float, ...],
-        base_shape: tuple[int, ...],
-        base_scale: tuple[float, ...],
-        axes: tuple[str, ...],
-        base_translation: Sequence[float] | None = None,
-        chunks: ChunksLike = "auto",
-        shards: ShardsLike | None = None,
-        data_type: str = "uint16",
-        dimension_separator: Literal[".", "/"] = "/",
-        compressors: Any = "auto",
-        zarr_format: Literal[2, 3] = 2,
-        other_array_kwargs: Mapping[str, Any] | None = None,
-        precision_scale: bool = True,
-    ) -> "ImagePyramidBuilder":
-        # Since shapes needs to be rounded to integers, we compute them here
-        # and then pass them to from_shapes
-        # This ensures that the shapes and scaling factors are consistent
-        # and avoids accumulation of rounding errors
-        shapes = compute_shapes_from_scaling_factors(
-            base_shape=base_shape,
-            scaling_factors=scaling_factors,
-            num_levels=len(levels_paths),
-        )
+        array_static_kwargs["chunk_key_encoding"] = {
+            "name": "default",
+            "separator": dimension_separator,
+        }
+        array_static_kwargs["dimension_names"] = axes
 
-        if precision_scale:
-            # Compute precise scales from shapes
-            # Since shapes are rounded to integers, the scaling factors
-            # may not be exactly the same as the input scaling factors
-            # Thus, we compute the scales from the shapes to ensure consistency
-            base_scale_ = compute_scales_from_shapes(
-                shapes=shapes,
-                base_scale=base_scale,
-            )
-        else:
-            base_scale_ = _compute_scales_from_factors(
-                base_scale=base_scale,
-                scaling_factors=scaling_factors,
-                num_levels=len(levels_paths),
+    for path in paths:
+        if any(s < 1 for s in ref_shape):
+            raise NgioValueError(
+                "Level shape must be at least 1 on all dimensions. "
+                f"Calculated shape: {ref_shape} at level {path}."
             )
-
-        return cls.from_shapes(
-            shapes=shapes,
-            base_scale=base_scale_,
-            axes=axes,
-            base_translation=base_translation,
-            levels_paths=levels_paths,
+        new_arr = root_group.create_array(
+            name=path,
+            shape=tuple(ref_shape),
             chunks=chunks,
-            shards=shards,
-            data_type=data_type,
-            dimension_separator=dimension_separator,
-            compressors=compressors,
-            zarr_format=zarr_format,
-            other_array_kwargs=other_array_kwargs,
+            **array_static_kwargs,
         )
 
-    @classmethod
-    def from_shapes(
-        cls,
-        shapes: Sequence[tuple[int, ...]],
-        base_scale: tuple[float, ...] | list[tuple[float, ...]],
-        axes: tuple[str, ...],
-        base_translation: Sequence[float] | None = None,
-        levels_paths: Sequence[str] | None = None,
-        chunks: ChunksLike = "auto",
-        shards: ShardsLike | None = None,
-        data_type: str = "uint16",
-        dimension_separator: Literal[".", "/"] = "/",
-        compressors: Any = "auto",
-        zarr_format: Literal[2, 3] = 2,
-        other_array_kwargs: Mapping[str, Any] | None = None,
-    ) -> "ImagePyramidBuilder":
-        levels = []
-        if levels_paths is None:
-            levels_paths = tuple(str(i) for i in range(len(shapes)))
-
-        _check_order(shapes)
-        if isinstance(base_scale, tuple) and all(
-            isinstance(s, float) for s in base_scale
-        ):
-            scales = compute_scales_from_shapes(shapes, base_scale)
-        elif isinstance(base_scale, list):
-            scales = base_scale
-            if len(scales) != len(shapes):
-                raise NgioValueError(
-                    "Scales must have the same length as shapes "
-                    f"({len(shapes)}), got {len(scales)}"
-                )
-        else:
-            raise NgioValueError(
-                "base_scale must be either a tuple of floats or a list of tuples "
-                " of floats."
-            )
-
-        translations = _compute_translations_from_shapes(scales, base_translation)
-        for level_path, shape, scale, translation in zip(
-            levels_paths,
-            shapes,
-            scales,
-            translations,
-            strict=True,
-        ):
-            level = PyramidLevel(
-                path=level_path,
-                shape=shape,
-                scale=scale,
-                translation=translation,
-                chunks=chunks,
-                shards=shards,
-            )
-            levels.append(level)
-        other_array_kwargs = other_array_kwargs or {}
-        return cls(
-            levels=levels,
-            axes=axes,
-            data_type=data_type,
-            dimension_separator=dimension_separator,
-            compressors=compressors,
-            zarr_format=zarr_format,
-            other_array_kwargs=other_array_kwargs,
+        ref_shape = [
+            math.floor(s / sc) for s, sc in zip(ref_shape, scaling_factors, strict=True)
+        ]
+        chunks = tuple(
+            min(c, s) for c, s in zip(new_arr.chunks, ref_shape, strict=True)
         )
-
-    def to_zarr(self, group: zarr.Group) -> None:
-        """Save the pyramid specification to a Zarr group.
-
-        Args:
-            group (zarr.Group): The Zarr group to save the pyramid specification to.
-        """
-        array_static_kwargs = {
-            "dtype": self.data_type,
-            "overwrite": True,
-            "compressors": self.compressors,
-            **self.other_array_kwargs,
-        }
-
-        if self.zarr_format == 2:
-            array_static_kwargs["chunk_key_encoding"] = {
-                "name": "v2",
-                "separator": self.dimension_separator,
-            }
-        else:
-            array_static_kwargs["chunk_key_encoding"] = {
-                "name": "default",
-                "separator": self.dimension_separator,
-            }
-            array_static_kwargs["dimension_names"] = self.axes
-        for p_level in self.levels:
-            group.create_array(
-                name=p_level.path,
-                shape=tuple(p_level.shape),
-                chunks=p_level.chunks,
-                shards=p_level.shards,
-                **array_static_kwargs,
-            )
+    return None
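`init_empty_pyramid` replaces the builder classes with a single pass: for each level it creates the array, then floors the reference shape by the (integer-coerced) scaling factors and clips the chunks to the next level's shape. A usage sketch against the signature above; the store path and sizes are illustrative, and only empty arrays are allocated here (OME-Zarr metadata is written elsewhere in the package):

    from ngio.common import init_empty_pyramid

    # Creates arrays "0" (1, 4096, 4096), "1" (1, 2048, 2048), "2" (1, 1024, 1024)
    init_empty_pyramid(
        store="empty.zarr",  # illustrative local path
        paths=["0", "1", "2"],
        ref_shape=(1, 4096, 4096),
        scaling_factors=(1, 2, 2),
        axes=("c", "y", "x"),
        chunks=(1, 1024, 1024),
        dtype="uint16",
    )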