ngio 0.2.0a2__py3-none-any.whl → 0.5.0b4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (106)
  1. ngio/__init__.py +40 -12
  2. ngio/common/__init__.py +16 -32
  3. ngio/common/_dimensions.py +270 -48
  4. ngio/common/_masking_roi.py +153 -0
  5. ngio/common/_pyramid.py +267 -73
  6. ngio/common/_roi.py +290 -66
  7. ngio/common/_synt_images_utils.py +101 -0
  8. ngio/common/_zoom.py +54 -22
  9. ngio/experimental/__init__.py +5 -0
  10. ngio/experimental/iterators/__init__.py +15 -0
  11. ngio/experimental/iterators/_abstract_iterator.py +390 -0
  12. ngio/experimental/iterators/_feature.py +189 -0
  13. ngio/experimental/iterators/_image_processing.py +130 -0
  14. ngio/experimental/iterators/_mappers.py +48 -0
  15. ngio/experimental/iterators/_rois_utils.py +126 -0
  16. ngio/experimental/iterators/_segmentation.py +235 -0
  17. ngio/hcs/__init__.py +17 -58
  18. ngio/hcs/_plate.py +1354 -0
  19. ngio/images/__init__.py +30 -9
  20. ngio/images/_abstract_image.py +968 -0
  21. ngio/images/_create_synt_container.py +132 -0
  22. ngio/images/_create_utils.py +423 -0
  23. ngio/images/_image.py +926 -0
  24. ngio/images/_label.py +417 -0
  25. ngio/images/_masked_image.py +531 -0
  26. ngio/images/_ome_zarr_container.py +1235 -0
  27. ngio/images/_table_ops.py +471 -0
  28. ngio/io_pipes/__init__.py +75 -0
  29. ngio/io_pipes/_io_pipes.py +361 -0
  30. ngio/io_pipes/_io_pipes_masked.py +488 -0
  31. ngio/io_pipes/_io_pipes_roi.py +146 -0
  32. ngio/io_pipes/_io_pipes_types.py +56 -0
  33. ngio/io_pipes/_match_shape.py +377 -0
  34. ngio/io_pipes/_ops_axes.py +344 -0
  35. ngio/io_pipes/_ops_slices.py +411 -0
  36. ngio/io_pipes/_ops_slices_utils.py +199 -0
  37. ngio/io_pipes/_ops_transforms.py +104 -0
  38. ngio/io_pipes/_zoom_transform.py +180 -0
  39. ngio/ome_zarr_meta/__init__.py +39 -15
  40. ngio/ome_zarr_meta/_meta_handlers.py +490 -96
  41. ngio/ome_zarr_meta/ngio_specs/__init__.py +24 -10
  42. ngio/ome_zarr_meta/ngio_specs/_axes.py +268 -234
  43. ngio/ome_zarr_meta/ngio_specs/_channels.py +125 -41
  44. ngio/ome_zarr_meta/ngio_specs/_dataset.py +42 -87
  45. ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py +536 -2
  46. ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +202 -198
  47. ngio/ome_zarr_meta/ngio_specs/_pixel_size.py +72 -34
  48. ngio/ome_zarr_meta/v04/__init__.py +21 -5
  49. ngio/ome_zarr_meta/v04/_custom_models.py +18 -0
  50. ngio/ome_zarr_meta/v04/{_v04_spec_utils.py → _v04_spec.py} +151 -90
  51. ngio/ome_zarr_meta/v05/__init__.py +27 -0
  52. ngio/ome_zarr_meta/v05/_custom_models.py +18 -0
  53. ngio/ome_zarr_meta/v05/_v05_spec.py +511 -0
  54. ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/mask.png +0 -0
  55. ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/nuclei.png +0 -0
  56. ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/raw.jpg +0 -0
  57. ngio/resources/__init__.py +55 -0
  58. ngio/resources/resource_model.py +36 -0
  59. ngio/tables/__init__.py +20 -4
  60. ngio/tables/_abstract_table.py +270 -0
  61. ngio/tables/_tables_container.py +449 -0
  62. ngio/tables/backends/__init__.py +50 -1
  63. ngio/tables/backends/_abstract_backend.py +200 -31
  64. ngio/tables/backends/_anndata.py +139 -0
  65. ngio/tables/backends/_anndata_utils.py +10 -114
  66. ngio/tables/backends/_csv.py +19 -0
  67. ngio/tables/backends/_json.py +92 -0
  68. ngio/tables/backends/_parquet.py +19 -0
  69. ngio/tables/backends/_py_arrow_backends.py +222 -0
  70. ngio/tables/backends/_table_backends.py +162 -38
  71. ngio/tables/backends/_utils.py +608 -0
  72. ngio/tables/v1/__init__.py +19 -4
  73. ngio/tables/v1/_condition_table.py +71 -0
  74. ngio/tables/v1/_feature_table.py +79 -115
  75. ngio/tables/v1/_generic_table.py +21 -90
  76. ngio/tables/v1/_roi_table.py +486 -137
  77. ngio/transforms/__init__.py +5 -0
  78. ngio/transforms/_zoom.py +19 -0
  79. ngio/utils/__init__.py +16 -14
  80. ngio/utils/_cache.py +48 -0
  81. ngio/utils/_datasets.py +121 -13
  82. ngio/utils/_fractal_fsspec_store.py +42 -0
  83. ngio/utils/_zarr_utils.py +374 -218
  84. ngio-0.5.0b4.dist-info/METADATA +147 -0
  85. ngio-0.5.0b4.dist-info/RECORD +88 -0
  86. {ngio-0.2.0a2.dist-info → ngio-0.5.0b4.dist-info}/WHEEL +1 -1
  87. ngio/common/_array_pipe.py +0 -160
  88. ngio/common/_axes_transforms.py +0 -63
  89. ngio/common/_common_types.py +0 -5
  90. ngio/common/_slicer.py +0 -97
  91. ngio/images/abstract_image.py +0 -240
  92. ngio/images/create.py +0 -251
  93. ngio/images/image.py +0 -389
  94. ngio/images/label.py +0 -236
  95. ngio/images/omezarr_container.py +0 -535
  96. ngio/ome_zarr_meta/_generic_handlers.py +0 -320
  97. ngio/ome_zarr_meta/v04/_meta_handlers.py +0 -54
  98. ngio/tables/_validators.py +0 -192
  99. ngio/tables/backends/_anndata_v1.py +0 -75
  100. ngio/tables/backends/_json_v1.py +0 -56
  101. ngio/tables/tables_container.py +0 -300
  102. ngio/tables/v1/_masking_roi_table.py +0 -175
  103. ngio/utils/_logger.py +0 -29
  104. ngio-0.2.0a2.dist-info/METADATA +0 -95
  105. ngio-0.2.0a2.dist-info/RECORD +0 -53
  106. {ngio-0.2.0a2.dist-info → ngio-0.5.0b4.dist-info}/licenses/LICENSE +0 -0
ngio/common/_pyramid.py CHANGED
@@ -1,51 +1,71 @@
1
- import math
2
- from collections.abc import Collection
3
- from typing import Literal
1
+ from collections.abc import Callable, Mapping, Sequence
2
+ from typing import Any, Literal
4
3
 
5
4
  import dask.array as da
6
5
  import numpy as np
7
6
  import zarr
7
+ from pydantic import BaseModel, ConfigDict, model_validator
8
8
 
9
- from ngio.common._zoom import _zoom_inputs_check, dask_zoom, numpy_zoom
10
- from ngio.utils import AccessModeLiteral, StoreOrGroup, open_group_wrapper
9
+ from ngio.common._zoom import (
10
+ InterpolationOrder,
11
+ _zoom_inputs_check,
12
+ dask_zoom,
13
+ numpy_zoom,
14
+ )
15
+ from ngio.utils import (
16
+ NgioValueError,
17
+ )
11
18
 
12
19
 
13
20
  def _on_disk_numpy_zoom(
14
21
  source: zarr.Array,
15
22
  target: zarr.Array,
16
- order: Literal[0, 1, 2] = 1,
23
+ order: InterpolationOrder,
17
24
  ) -> None:
18
- target[...] = numpy_zoom(source[...], target_shape=target.shape, order=order)
25
+ source_array = source[...]
26
+ if not isinstance(source_array, np.ndarray):
27
+ raise NgioValueError("source zarr array could not be read as a numpy array")
28
+ target[...] = numpy_zoom(source_array, target_shape=target.shape, order=order)
19
29
 
20
30
 
21
31
  def _on_disk_dask_zoom(
22
32
  source: zarr.Array,
23
33
  target: zarr.Array,
24
- order: Literal[0, 1, 2] = 1,
34
+ order: InterpolationOrder,
25
35
  ) -> None:
26
36
  source_array = da.from_zarr(source)
27
37
  target_array = dask_zoom(source_array, target_shape=target.shape, order=order)
28
38
 
39
+ # This is a potential fix for Dask 2025.11
40
+ # import dask.config
41
+ # chunk_size_bytes = np.prod(target.chunks) * target_array.dtype.itemsize
42
+ # current_chunk_size = dask.config.get("array.chunk-size")
43
+ # Increase the chunk size to avoid dask potentially creating
44
+ # corrupted chunks when writing chunks that are not multiple of the
45
+ # target chunk size
46
+ # dask.config.set({"array.chunk-size": f"{chunk_size_bytes}B"})
29
47
  target_array = target_array.rechunk(target.chunks)
30
- target_array.compute_chunk_sizes()
48
+ target_array = target_array.compute_chunk_sizes()
31
49
  target_array.to_zarr(target)
50
+ # Restore previous chunk size
51
+ # dask.config.set({"array.chunk-size": current_chunk_size})
32
52
 
33
53
 
34
54
  def _on_disk_coarsen(
35
55
  source: zarr.Array,
36
56
  target: zarr.Array,
37
- _order: Literal[0, 1] = 1,
38
- aggregation_function: np.ufunc | None = None,
57
+ order: InterpolationOrder = "linear",
58
+ aggregation_function: Callable | None = None,
39
59
  ) -> None:
40
60
  """Apply a coarsening operation from a source zarr array to a target zarr array.
41
61
 
42
62
  Args:
43
63
  source (zarr.Array): The source array to coarsen.
44
64
  target (zarr.Array): The target array to save the coarsened result to.
45
- _order (Literal[0, 1]): The order of interpolation is not really implemented
65
+ order (InterpolationOrder): The order of interpolation is not really implemented
46
66
  for coarsening, but it is kept for compatibility with the zoom function.
47
- _order=1 -> linear interpolation ~ np.mean
48
- _order=0 -> nearest interpolation ~ np.max
67
+ order="linear" -> linear interpolation ~ np.mean
68
+ order="nearest" -> nearest interpolation ~ np.max
49
69
  aggregation_function (np.ufunc): The aggregation function to use.
50
70
  """
51
71
  source_array = da.from_zarr(source)
@@ -59,13 +79,15 @@ def _on_disk_coarsen(
59
79
  )
60
80
 
61
81
  if aggregation_function is None:
62
- if _order == 1:
82
+ if order == "linear":
63
83
  aggregation_function = np.mean
64
- elif _order == 0:
84
+ elif order == "nearest":
65
85
  aggregation_function = np.max
86
+ elif order == "cubic":
87
+ raise NgioValueError("Cubic interpolation is not supported for coarsening.")
66
88
  else:
67
- raise ValueError(
68
- f"Aggregation function must be provided for order {_order}"
89
+ raise NgioValueError(
90
+ f"Aggregation function must be provided for order {order}"
69
91
  )
70
92
 
71
93
  coarsening_setup = {}
@@ -77,7 +99,7 @@ def _on_disk_coarsen(
77
99
  if factor.is_integer():
78
100
  coarsening_setup[i] = int(factor)
79
101
  else:
80
- raise ValueError(
102
+ raise NgioValueError(
81
103
  f"Coarsening factor must be an integer, got {factor} on axis {i}"
82
104
  )
83
105
 
@@ -91,7 +113,7 @@ def _on_disk_coarsen(
91
113
  def on_disk_zoom(
92
114
  source: zarr.Array,
93
115
  target: zarr.Array,
94
- order: Literal[0, 1, 2] = 1,
116
+ order: InterpolationOrder = "linear",
95
117
  mode: Literal["dask", "numpy", "coarsen"] = "dask",
96
118
  ) -> None:
97
119
  """Apply a zoom operation from a source zarr array to a target zarr array.
@@ -99,17 +121,17 @@ def on_disk_zoom(
99
121
  Args:
100
122
  source (zarr.Array): The source array to zoom.
101
123
  target (zarr.Array): The target array to save the zoomed result to.
102
- order (Literal[0, 1, 2]): The order of interpolation. Defaults to 1.
124
+ order (InterpolationOrder): The order of interpolation. Defaults to "linear".
103
125
  mode (Literal["dask", "numpy", "coarsen"]): The mode to use. Defaults to "dask".
104
126
  """
105
127
  if not isinstance(source, zarr.Array):
106
- raise ValueError("source must be a zarr array")
128
+ raise NgioValueError("source must be a zarr array")
107
129
 
108
130
  if not isinstance(target, zarr.Array):
109
- raise ValueError("target must be a zarr array")
131
+ raise NgioValueError("target must be a zarr array")
110
132
 
111
133
  if source.dtype != target.dtype:
112
- raise ValueError("source and target must have the same dtype")
134
+ raise NgioValueError("source and target must have the same dtype")
113
135
 
114
136
  match mode:
115
137
  case "numpy":
@@ -122,12 +144,12 @@ def on_disk_zoom(
122
144
  target,
123
145
  )
124
146
  case _:
125
- raise ValueError("mode must be either 'dask', 'numpy' or 'coarsen'")
147
+ raise NgioValueError("mode must be either 'dask', 'numpy' or 'coarsen'")
126
148
 
127
149
 
128
150
  def _find_closest_arrays(
129
151
  processed: list[zarr.Array], to_be_processed: list[zarr.Array]
130
- ) -> tuple[int, int]:
152
+ ) -> tuple[np.intp, np.intp]:
131
153
  dist_matrix = np.zeros((len(processed), len(to_be_processed)))
132
154
  for i, arr_to_proc in enumerate(to_be_processed):
133
155
  for j, proc_arr in enumerate(processed):
@@ -142,13 +164,15 @@ def _find_closest_arrays(
142
164
  )
143
165
  )
144
166
 
145
- return np.unravel_index(dist_matrix.argmin(), dist_matrix.shape)
167
+ indices = np.unravel_index(dist_matrix.argmin(), dist_matrix.shape)
168
+ assert len(indices) == 2, "Indices must be of length 2"
169
+ return indices
146
170
 
147
171
 
148
172
  def consolidate_pyramid(
149
173
  source: zarr.Array,
150
174
  targets: list[zarr.Array],
151
- order: Literal[0, 1, 2] = 1,
175
+ order: InterpolationOrder = "linear",
152
176
  mode: Literal["dask", "numpy", "coarsen"] = "dask",
153
177
  ) -> None:
154
178
  """Consolidate the Zarr array."""
@@ -170,54 +194,224 @@ def consolidate_pyramid(
170
194
  processed.append(target_image)
171
195
 
172
196
 
173
- def init_empty_pyramid(
174
- store: StoreOrGroup,
175
- paths: list[str],
176
- ref_shape: Collection[int],
177
- scaling_factors: Collection[float],
178
- chunks: Collection[int] | None = None,
179
- dtype: str = "uint16",
180
- mode: AccessModeLiteral = "a",
181
- ) -> None:
182
- # Return the an Image object
183
- if chunks is not None and len(chunks) != len(ref_shape):
184
- raise ValueError(
185
- "The shape and chunks must have the same number of dimensions."
186
- )
197
+ ################################################
198
+ #
199
+ # Builders for image pyramids
200
+ #
201
+ ################################################
187
202
 
188
- if len(ref_shape) != len(scaling_factors):
189
- raise ValueError(
190
- "The shape and scaling factor must have the same number of dimensions."
191
- )
203
+ ChunksLike = tuple[int, ...] | Literal["auto"]
204
+ ShardsLike = tuple[int, ...] | Literal["auto"]
205
+
206
+
207
+ def shapes_from_scaling_factors(
208
+ base_shape: tuple[int, ...],
209
+ scaling_factors: tuple[float, ...],
210
+ num_levels: int,
211
+ ) -> list[tuple[int, ...]]:
212
+ """Compute the shapes of each level in the pyramid from scaling factors.
192
213
 
193
- root_group, _ = open_group_wrapper(store, mode=mode)
194
- for path in paths:
195
- if any(s < 1 for s in ref_shape):
196
- raise ValueError(
197
- "Level shape must be at least 1 on all dimensions. "
198
- f"Calculated shape: {ref_shape} at level {path}."
214
+ Args:
215
+ base_shape (tuple[int, ...]): The shape of the base level.
216
+ scaling_factors (tuple[float, ...]): The scaling factors between levels.
217
+ num_levels (int): The number of levels in the pyramid.
218
+
219
+ Returns:
220
+ list[tuple[int, ...]]: The shapes of each level in the pyramid.
221
+ """
222
+ shapes = []
223
+ current_shape = base_shape
224
+ for _ in range(num_levels):
225
+ shapes.append(current_shape)
226
+ current_shape = tuple(
227
+ max(1, int(s / f))
228
+ for s, f in zip(current_shape, scaling_factors, strict=True)
229
+ )
230
+ return shapes
231
+
232
+
233
+ def _check_order(shapes: Sequence[tuple[int, ...]]):
234
+ """Check if the shapes are in decreasing order."""
235
+ num_pixels = [np.prod(shape) for shape in shapes]
236
+ for i in range(1, len(num_pixels)):
237
+ if num_pixels[i] >= num_pixels[i - 1]:
238
+ raise NgioValueError("Shapes are not in decreasing order.")
239
+
240
+
241
+ class PyramidLevel(BaseModel):
242
+ path: str
243
+ shape: tuple[int, ...]
244
+ scale: tuple[float, ...]
245
+ chunks: ChunksLike = "auto"
246
+ shards: ShardsLike | None = None
247
+
248
+ @model_validator(mode="after")
249
+ def _model_validation(self) -> "PyramidLevel":
250
+ # Same length as shape
251
+ if len(self.scale) != len(self.shape):
252
+ raise NgioValueError(
253
+ "Scale must have the same length as shape "
254
+ f"({len(self.shape)}), got {len(self.scale)}"
199
255
  )
200
- new_arr = root_group.zeros(
201
- name=path,
202
- shape=ref_shape,
203
- dtype=dtype,
256
+ if any(isinstance(s, float) and s < 0 for s in self.scale):
257
+ raise NgioValueError("Scale values must be positive.")
258
+
259
+ if isinstance(self.chunks, tuple):
260
+ if len(self.chunks) != len(self.shape):
261
+ raise NgioValueError(
262
+ "Chunks must have the same length as shape "
263
+ f"({len(self.shape)}), got {len(self.chunks)}"
264
+ )
265
+ normalized_chunks = []
266
+ for dim_size, chunk_size in zip(self.shape, self.chunks, strict=True):
267
+ normalized_chunks.append(min(dim_size, chunk_size))
268
+ self.chunks = tuple(normalized_chunks)
269
+
270
+ if isinstance(self.shards, tuple):
271
+ if len(self.shards) != len(self.shape):
272
+ raise NgioValueError(
273
+ "Shards must have the same length as shape "
274
+ f"({len(self.shape)}), got {len(self.shards)}"
275
+ )
276
+ normalized_shards = []
277
+ for dim_size, shard_size in zip(self.shape, self.shards, strict=True):
278
+ normalized_shards.append(min(dim_size, shard_size))
279
+ self.shards = tuple(normalized_shards)
280
+ return self
281
+
282
+
283
+ class ImagePyramidBuilder(BaseModel):
284
+ levels: list[PyramidLevel]
285
+ axes: tuple[str, ...]
286
+ data_type: str = "uint16"
287
+ dimension_separator: Literal[".", "/"] = "/"
288
+ compressors: Any = "auto"
289
+ zarr_format: Literal[2, 3] = 2
290
+ other_array_kwargs: Mapping[str, Any] = {}
291
+
292
+ model_config = ConfigDict(arbitrary_types_allowed=True)
293
+
294
+ @classmethod
295
+ def from_scaling_factors(
296
+ cls,
297
+ levels_paths: tuple[str, ...],
298
+ scaling_factors: tuple[float, ...],
299
+ base_shape: tuple[int, ...],
300
+ base_scale: tuple[float, ...],
301
+ axes: tuple[str, ...],
302
+ chunks: ChunksLike = "auto",
303
+ shards: ShardsLike | None = None,
304
+ data_type: str = "uint16",
305
+ dimension_separator: Literal[".", "/"] = "/",
306
+ compressors: Any = "auto",
307
+ zarr_format: Literal[2, 3] = 2,
308
+ other_array_kwargs: Mapping[str, Any] | None = None,
309
+ ) -> "ImagePyramidBuilder":
310
+ shapes = shapes_from_scaling_factors(
311
+ base_shape=base_shape,
312
+ scaling_factors=scaling_factors,
313
+ num_levels=len(levels_paths),
314
+ )
315
+ return cls.from_shapes(
316
+ shapes=shapes,
317
+ base_scale=base_scale,
318
+ axes=axes,
319
+ levels_paths=levels_paths,
204
320
  chunks=chunks,
205
- dimension_separator="/",
206
- overwrite=True,
321
+ shards=shards,
322
+ data_type=data_type,
323
+ dimension_separator=dimension_separator,
324
+ compressors=compressors,
325
+ zarr_format=zarr_format,
326
+ other_array_kwargs=other_array_kwargs,
327
+ )
328
+
329
+ @classmethod
330
+ def from_shapes(
331
+ cls,
332
+ shapes: Sequence[tuple[int, ...]],
333
+ base_scale: tuple[float, ...],
334
+ axes: tuple[str, ...],
335
+ levels_paths: Sequence[str] | None = None,
336
+ chunks: ChunksLike = "auto",
337
+ shards: ShardsLike | None = None,
338
+ data_type: str = "uint16",
339
+ dimension_separator: Literal[".", "/"] = "/",
340
+ compressors: Any = "auto",
341
+ zarr_format: Literal[2, 3] = 2,
342
+ other_array_kwargs: Mapping[str, Any] | None = None,
343
+ ) -> "ImagePyramidBuilder":
344
+ levels = []
345
+ if levels_paths is None:
346
+ levels_paths = tuple(str(i) for i in range(len(shapes)))
347
+ _check_order(shapes)
348
+ scale_ = base_scale
349
+ for i, (path, shape) in enumerate(zip(levels_paths, shapes, strict=True)):
350
+ levels.append(
351
+ PyramidLevel(
352
+ path=path,
353
+ shape=shape,
354
+ scale=scale_,
355
+ chunks=chunks,
356
+ shards=shards,
357
+ )
358
+ )
359
+ if i + 1 < len(shapes):
360
+ # This only works for downsampling pyramids
361
+ # The _check_order function ensures that
362
+ # shapes are decreasing
363
+ next_shape = shapes[i + 1]
364
+ scaling_factor = tuple(
365
+ s1 / s2
366
+ for s1, s2 in zip(
367
+ shape,
368
+ next_shape,
369
+ strict=True,
370
+ )
371
+ )
372
+ scale_ = tuple(
373
+ s * f for s, f in zip(scale_, scaling_factor, strict=True)
374
+ )
375
+ other_array_kwargs = other_array_kwargs or {}
376
+ return cls(
377
+ levels=levels,
378
+ axes=axes,
379
+ data_type=data_type,
380
+ dimension_separator=dimension_separator,
381
+ compressors=compressors,
382
+ zarr_format=zarr_format,
383
+ other_array_kwargs=other_array_kwargs,
207
384
  )
208
385
 
209
- # Todo redo this with when a proper build of pyramid is implemented
210
- _shape = []
211
- for s, sc in zip(ref_shape, scaling_factors, strict=True):
212
- if math.floor(s / sc) % 2 == 0:
213
- _shape.append(math.floor(s / sc))
214
- else:
215
- _shape.append(math.ceil(s / sc))
216
- ref_shape = _shape
217
-
218
- if chunks is None:
219
- chunks = new_arr.chunks
220
- if chunks is None:
221
- raise ValueError("Something went wrong with the chunks")
222
- chunks = [min(c, s) for c, s in zip(chunks, ref_shape, strict=True)]
223
- return None
386
+ def to_zarr(self, group: zarr.Group) -> None:
387
+ """Save the pyramid specification to a Zarr group.
388
+
389
+ Args:
390
+ group (zarr.Group): The Zarr group to save the pyramid specification to.
391
+ """
392
+ array_static_kwargs = {
393
+ "dtype": self.data_type,
394
+ "overwrite": True,
395
+ "compressors": self.compressors,
396
+ **self.other_array_kwargs,
397
+ }
398
+
399
+ if self.zarr_format == 2:
400
+ array_static_kwargs["chunk_key_encoding"] = {
401
+ "name": "v2",
402
+ "separator": self.dimension_separator,
403
+ }
404
+ else:
405
+ array_static_kwargs["chunk_key_encoding"] = {
406
+ "name": "default",
407
+ "separator": self.dimension_separator,
408
+ }
409
+ array_static_kwargs["dimension_names"] = self.axes
410
+ for p_level in self.levels:
411
+ group.create_array(
412
+ name=p_level.path,
413
+ shape=tuple(p_level.shape),
414
+ chunks=p_level.chunks,
415
+ shards=p_level.shards,
416
+ **array_static_kwargs,
417
+ )