ngio 0.2.0a3__py3-none-any.whl → 0.2.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. ngio/__init__.py +4 -4
  2. ngio/common/__init__.py +12 -2
  3. ngio/common/_array_pipe.py +106 -0
  4. ngio/common/_axes_transforms.py +3 -2
  5. ngio/common/_dimensions.py +7 -0
  6. ngio/common/_masking_roi.py +158 -0
  7. ngio/common/_pyramid.py +16 -11
  8. ngio/common/_roi.py +74 -0
  9. ngio/common/_slicer.py +1 -2
  10. ngio/common/_zoom.py +5 -3
  11. ngio/hcs/__init__.py +2 -57
  12. ngio/hcs/plate.py +399 -0
  13. ngio/images/abstract_image.py +97 -28
  14. ngio/images/create.py +48 -29
  15. ngio/images/image.py +99 -46
  16. ngio/images/label.py +109 -92
  17. ngio/images/masked_image.py +259 -0
  18. ngio/images/omezarr_container.py +201 -64
  19. ngio/ome_zarr_meta/__init__.py +25 -13
  20. ngio/ome_zarr_meta/_meta_handlers.py +718 -69
  21. ngio/ome_zarr_meta/ngio_specs/__init__.py +8 -0
  22. ngio/ome_zarr_meta/ngio_specs/_channels.py +11 -0
  23. ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py +374 -2
  24. ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +169 -119
  25. ngio/ome_zarr_meta/ngio_specs/_pixel_size.py +35 -3
  26. ngio/ome_zarr_meta/v04/__init__.py +17 -5
  27. ngio/ome_zarr_meta/v04/_v04_spec_utils.py +85 -12
  28. ngio/tables/__init__.py +2 -0
  29. ngio/tables/_validators.py +2 -4
  30. ngio/tables/backends/_anndata_utils.py +2 -1
  31. ngio/tables/backends/_anndata_v1.py +2 -1
  32. ngio/tables/backends/_json_v1.py +1 -1
  33. ngio/tables/tables_container.py +12 -2
  34. ngio/tables/v1/__init__.py +1 -2
  35. ngio/tables/v1/_feature_table.py +7 -5
  36. ngio/tables/v1/_generic_table.py +65 -11
  37. ngio/tables/v1/_roi_table.py +145 -27
  38. ngio/utils/_datasets.py +4 -2
  39. ngio/utils/_fractal_fsspec_store.py +3 -2
  40. ngio/utils/_logger.py +3 -1
  41. ngio/utils/_zarr_utils.py +25 -2
  42. {ngio-0.2.0a3.dist-info → ngio-0.2.0b1.dist-info}/METADATA +4 -1
  43. ngio-0.2.0b1.dist-info/RECORD +54 -0
  44. ngio/ome_zarr_meta/_generic_handlers.py +0 -320
  45. ngio/ome_zarr_meta/v04/_meta_handlers.py +0 -54
  46. ngio/tables/v1/_masking_roi_table.py +0 -175
  47. ngio-0.2.0a3.dist-info/RECORD +0 -54
  48. {ngio-0.2.0a3.dist-info → ngio-0.2.0b1.dist-info}/WHEEL +0 -0
  49. {ngio-0.2.0a3.dist-info → ngio-0.2.0b1.dist-info}/licenses/LICENSE +0 -0
ngio/ome_zarr_meta/ngio_specs/_ngio_image.py CHANGED
@@ -14,8 +14,6 @@ import numpy as np
 from pydantic import BaseModel
 
 from ngio.ome_zarr_meta.ngio_specs._axes import (
-    SpaceUnits,
-    TimeUnits,
     canonical_axes,
 )
 from ngio.ome_zarr_meta.ngio_specs._channels import Channel, ChannelsMeta
@@ -56,15 +54,59 @@ class AbstractNgioImageMeta:
             raise NgioValidationError("At least one dataset must be provided.")
 
         self._datasets = datasets
+        self._axes_mapper = datasets[0].axes_mapper
 
     def __repr__(self):
         class_name = type(self).__name__
         paths = [dataset.path for dataset in self.datasets]
-        on_disk_axes = self.datasets[0].axes_mapper.on_disk_axes_names
-        return (
-            f"{class_name}(name={self.name}, "
-            f"datasets={paths}, "
-            f"on_disk_axes={on_disk_axes})"
+        on_disk_axes = self.axes_mapper.on_disk_axes_names
+        return f"{class_name}(name={self.name}, datasets={paths}, axes={on_disk_axes})"
+
+    @classmethod
+    def default_init(
+        cls,
+        levels: int | Collection[str],
+        axes_names: Collection[str],
+        pixel_size: PixelSize,
+        scaling_factors: Collection[float] | None = None,
+        name: str | None = None,
+        version: str = "0.4",
+    ):
+        """Initialize the ImageMeta object."""
+        axes = canonical_axes(
+            axes_names,
+            space_units=pixel_size.space_unit,
+            time_units=pixel_size.time_unit,
+        )
+
+        px_size_dict = pixel_size.as_dict()
+        scale = [px_size_dict.get(ax.on_disk_name, 1.0) for ax in axes]
+        translation = [0.0] * len(scale)
+
+        if scaling_factors is None:
+            _default = {"x": 2.0, "y": 2.0}
+            scaling_factors = [_default.get(ax.on_disk_name, 1.0) for ax in axes]
+
+        if isinstance(levels, int):
+            levels = [str(i) for i in range(levels)]
+
+        datasets = []
+        for level in levels:
+            dataset = Dataset(
+                path=level,
+                on_disk_axes=axes,
+                on_disk_scale=scale,
+                on_disk_translation=translation,
+                allow_non_canonical_axes=False,
+                strict_canonical_order=True,
+            )
+            datasets.append(dataset)
+            scale = [s * f for s, f in zip(scale, scaling_factors, strict=True)]
+
+        return cls(
+            version=version,
+            name=name,
+            datasets=datasets,
         )
 
     @property
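
The `default_init` classmethod added here (and removed from its old position further down in this file) builds one Dataset per level and multiplies the scale by the per-axis factors (default 2.0 for x and y) after each level. A minimal usage sketch, assuming `NgioImageMeta` inherits this classmethod and that the import path, axis names, and pixel sizes below are valid; they are illustrative, not taken from this diff:

    # Hypothetical sketch; import path and values are assumptions.
    from ngio.ome_zarr_meta.ngio_specs import NgioImageMeta, PixelSize

    pixel_size = PixelSize(x=0.325, y=0.325, z=1.0)
    meta = NgioImageMeta.default_init(
        levels=4,                      # creates datasets "0".."3"
        axes_names=("z", "y", "x"),
        pixel_size=pixel_size,
    )
    # Level "0" keeps x=y=0.325; each further level doubles x and y,
    # so "1" -> 0.65, "2" -> 1.3, "3" -> 2.6, while z stays at 1.0.
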
@@ -82,6 +124,11 @@ class AbstractNgioImageMeta:
         """List of datasets in the multiscale."""
         return self._datasets
 
+    @property
+    def axes_mapper(self):
+        """Return the axes mapper."""
+        return self._axes_mapper
+
     @property
     def levels(self) -> int:
         """Number of levels in the multiscale."""
@@ -105,33 +152,86 @@ class AbstractNgioImageMeta:
             raise NgioValueError(f"Index {idx} out of range.")
         return self.datasets[idx]
 
-    def _get_dataset_by_pixel_size(
-        self, pixel_size: PixelSize, strict: bool = False, tol: float = 1e-6
-    ) -> Dataset:
-        """Get a dataset with the closest pixel size.
+    def _find_closest_dataset(
+        self, pixel_size: PixelSize, mode: str = "any"
+    ) -> Dataset | None:
+        """Find the closest dataset to the given pixel size.
 
         Args:
             pixel_size(PixelSize): The pixel size to search for.
-            strict(bool): If True, the pixel size must smaller than tol.
-            tol(float): Any pixel size with a distance less than tol will be considered.
+            mode(str): The mode to find the closest dataset.
+                "any": Will find the closest dataset.
+                "lr": Will find closest "lower" resolution dataset.
+                "hr": Will find closest "higher" resolution
        """
         min_dist = np.inf
-
         closest_dataset = None
-        for dataset in self.datasets:
-            dist = dataset.pixel_size.distance(pixel_size)
+
+        if mode == "any":
+            datasets = self.datasets
+        elif mode == "lr":
+            # Lower resolution means that the pixel size is larger.
+            datasets = [d for d in self.datasets if d.pixel_size > pixel_size]
+        elif mode == "hr":
+            # Higher resolution means that the pixel size is smaller.
+            datasets = [d for d in self.datasets if d.pixel_size < pixel_size]
+        else:
+            raise NgioValueError(f"Mode {mode} not recognized.")
+
+        for d in datasets:
+            dist = d.pixel_size.distance(pixel_size)
             if dist < min_dist:
                 min_dist = dist
-                closest_dataset = dataset
+                closest_dataset = d
+
+        return closest_dataset
+
+    def _get_closest_dataset(
+        self, pixel_size: PixelSize, strict: bool = False
+    ) -> Dataset:
+        """Get a dataset with the closest pixel size.
+
+        Args:
+            pixel_size(PixelSize): The pixel size to search for.
+            strict(bool): If True, the pixel size must be exactly the same.
+        """
+        closest_dataset = self._find_closest_dataset(pixel_size, mode="any")
 
         if closest_dataset is None:
             raise NgioValueError("No dataset found.")
 
-        if strict and min_dist > tol:
-            raise NgioValueError("No dataset with a pixel size close enough.")
-
+        if strict and closest_dataset.pixel_size != pixel_size:
+            raise NgioValueError(
+                "No dataset with a pixel size close enough. "
+                "Best match is "
+                f"{closest_dataset.path}:{closest_dataset.pixel_size}"
+            )
         return closest_dataset
 
+    def get_lowest_resolution_dataset(self) -> Dataset:
+        """Get the dataset with the lowest resolution."""
+        dataset = self.datasets[-1]
+        while True:
+            lower_res_dataset = self._find_closest_dataset(
+                dataset.pixel_size, mode="lr"
+            )
+            if lower_res_dataset is None:
+                break
+            dataset = lower_res_dataset
+        return dataset
+
+    def get_highest_resolution_dataset(self) -> Dataset:
+        """Get the dataset with the highest resolution."""
+        dataset = self.datasets[0]
+        while True:
+            higher_res_dataset = self._find_closest_dataset(
+                dataset.pixel_size, mode="hr"
+            )
+            if higher_res_dataset is None:
+                break
+            dataset = higher_res_dataset
+        return dataset
+
     def get_dataset(
         self,
         *,
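
The new `_find_closest_dataset` walks the dataset list with mode "any", "lr", or "hr", and the public getters iterate it until no lower or higher resolution level remains. A brief sketch of how the public side might be used, assuming `meta` is an image metadata object with several levels, as in the sketch above:

    highest = meta.get_highest_resolution_dataset()   # smallest pixel size
    lowest = meta.get_lowest_resolution_dataset()     # largest pixel size

    # strict=True now demands PixelSize equality rather than a distance
    # below a tolerance, raising NgioValueError otherwise.
    same = meta.get_dataset(pixel_size=highest.pixel_size, strict=True)
    assert same.path == highest.path
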
@@ -169,115 +269,65 @@ class AbstractNgioImageMeta:
         elif idx is not None:
             return self._get_dataset_by_index(idx)
         elif pixel_size is not None:
-            return self._get_dataset_by_pixel_size(pixel_size, strict=strict)
+            return self._get_closest_dataset(pixel_size, strict=strict)
         else:
             return self.get_highest_resolution_dataset()
 
-    @classmethod
-    def default_init(
-        cls,
-        levels: int | Collection[str],
-        axes_names: Collection[str],
-        pixel_size: PixelSize,
-        scaling_factors: Collection[float] | None = None,
-        name: str | None = None,
-        version: str = "0.4",
-    ):
-        """Initialize the ImageMeta object."""
-        axes = canonical_axes(
-            axes_names,
-            space_units=pixel_size.space_unit,
-            time_units=pixel_size.time_unit,
-        )
-
-        px_size_dict = pixel_size.as_dict()
-        scale = [px_size_dict.get(ax.on_disk_name, 1.0) for ax in axes]
-        translation = [0.0] * len(scale)
-
-        if scaling_factors is None:
-            _default = {"x": 2.0, "y": 2.0}
-            scaling_factors = [_default.get(ax.on_disk_name, 1.0) for ax in axes]
-
-        if isinstance(levels, int):
-            levels = [str(i) for i in range(levels)]
-
-        datasets = []
-        for level in levels:
-            dataset = Dataset(
-                path=level,
-                on_disk_axes=axes,
-                on_disk_scale=scale,
-                on_disk_translation=translation,
-                allow_non_canonical_axes=False,
-                strict_canonical_order=True,
+    def _get_closest_datasets(self, path: str | None = None) -> tuple[Dataset, Dataset]:
+        """Get the closest datasets to a dataset."""
+        dataset = self.get_dataset(path=path)
+        lr_dataset = self._find_closest_dataset(dataset.pixel_size, mode="lr")
+        if lr_dataset is None:
+            raise NgioValueError(
+                "No lower resolution dataset found. "
+                "This is the lowest resolution dataset."
             )
-            datasets.append(dataset)
-            scale = [s * f for s, f in zip(scale, scaling_factors, strict=True)]
+        return dataset, lr_dataset
 
-        return cls(
-            version=version,
-            name=name,
-            datasets=datasets,
-        )
+    def scaling_factor(self, path: str | None = None) -> list[float]:
+        """Get the scaling factors from a dataset to its lower resolution."""
+        if self.levels == 1:
+            return [1.0] * len(self.axes_mapper.on_disk_axes_names)
+        dataset, lr_dataset = self._get_closest_datasets(path=path)
 
-    def get_highest_resolution_dataset(self) -> Dataset:
-        """Get the dataset with the highest resolution."""
-        return self._get_dataset_by_pixel_size(
-            pixel_size=PixelSize(
-                x=0.0,
-                y=0.0,
-                z=0.0,
-                t=0.0,
-                space_unit=SpaceUnits.micrometer,
-                time_unit=TimeUnits.s,
-            ),
-            strict=False,
-        )
+        scaling_factors = []
+        for ax_name in self.axes_mapper.on_disk_axes_names:
+            s_d = dataset.get_scale(ax_name)
+            s_lr_d = lr_dataset.get_scale(ax_name)
+            scaling_factors.append(s_lr_d / s_d)
+        return scaling_factors
+
+    def yx_scaling(self, path: str | None = None) -> tuple[float, float]:
+        """Get the scaling factor from a dataset to its lower resolution."""
+        if self.levels == 1:
+            return 1.0, 1.0
+        dataset, lr_dataset = self._get_closest_datasets(path=path)
+
+        if lr_dataset is None:
+            raise NgioValueError(
+                "No lower resolution dataset found. "
+                "This is the lowest resolution dataset."
+            )
 
-    def get_lowest_resolution_dataset(self) -> Dataset:
-        """Get the dataset with the lowest resolution."""
-        return self._get_dataset_by_pixel_size(
-            pixel_size=PixelSize(
-                x=1000.0,
-                y=1000.0,
-                z=1000.0,
-                t=1000.0,
-                space_unit=SpaceUnits.micrometer,
-                time_unit=TimeUnits.s,
-            ),
-            strict=False,
-        )
+        s_d = dataset.get_scale("y")
+        s_lr_d = lr_dataset.get_scale("y")
+        scale_y = s_lr_d / s_d
 
-    def get_scaling_factor(self, axis_name: str) -> float:
-        """Get the scaling factors of the dataset."""
-        scaling_factors = []
-        for d1, d2 in zip(self.datasets[1:], self.datasets[:-1], strict=True):
-            scale_d1 = d1.get_scale(axis_name)
-            scale_d2 = d2.get_scale(axis_name)
-            scaling_factors.append(scale_d1 / scale_d2)
+        s_d = dataset.get_scale("x")
+        s_lr_d = lr_dataset.get_scale("x")
+        scale_x = s_lr_d / s_d
 
-        if not np.allclose(scaling_factors, scaling_factors[0]):
-            raise NgioValidationError(
-                f"Inconsistent scaling factors are not supported. {scaling_factors}"
-            )
-        return scaling_factors[0]
+        return scale_y, scale_x
 
-    @property
-    def xy_scaling_factor(self) -> float:
-        """Get the xy scaling factor of the dataset."""
-        x_scaling_factors = self.get_scaling_factor("x")
-        y_scaling_factors = self.get_scaling_factor("y")
-        if not np.isclose(x_scaling_factors, y_scaling_factors):
-            raise NgioValidationError(
-                "Inconsistent scaling factors are not supported. "
-                f"{x_scaling_factors}, {y_scaling_factors}"
-            )
-        return x_scaling_factors
+    def z_scaling(self, path: str | None = None) -> float:
+        """Get the scaling factor from a dataset to its lower resolution."""
+        if self.levels == 1:
+            return 1.0
+        dataset, lr_dataset = self._get_closest_datasets(path=path)
 
-    @property
-    def z_scaling_factor(self) -> float:
-        """Get the z scaling factor of the dataset."""
-        return self.get_scaling_factor("z")
+        s_d = dataset.get_scale("z")
+        s_lr_d = lr_dataset.get_scale("z")
+        return s_lr_d / s_d
 
 
 class NgioLabelMeta(AbstractNgioImageMeta):
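
The uniform-factor API (`get_scaling_factor`, `xy_scaling_factor`, `z_scaling_factor`) is replaced by per-level methods that compare a dataset with its next lower-resolution neighbour. A rough migration sketch; the method names come from this diff, while the `path` value is illustrative:

    # 0.2.0a3: one pyramid-wide factor, or NgioValidationError if the
    # factors differed between levels.
    # factor_xy = meta.xy_scaling_factor
    # factor_z = meta.z_scaling_factor

    # 0.2.0b1: factors from the level at `path` to its next lower resolution.
    per_axis = meta.scaling_factor(path="0")   # one factor per on-disk axis
    scale_y, scale_x = meta.yx_scaling(path="0")
    scale_z = meta.z_scaling(path="0")         # 1.0 for single-level images
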
@@ -294,7 +344,7 @@ class NgioLabelMeta(AbstractNgioImageMeta):
         super().__init__(version, name, datasets)
 
         # Make sure that there are no channel axes
-        channel_axis = self.datasets[0].axes_mapper.get_axis("c")
+        channel_axis = self.axes_mapper.get_axis("c")
         if channel_axis is not None:
             raise NgioValidationError("Label metadata must not have channel axes.")
 
ngio/ome_zarr_meta/ngio_specs/_pixel_size.py CHANGED
@@ -1,5 +1,8 @@
 """Fractal internal module for dataset metadata handling."""
 
+import math
+from functools import total_ordering
+
 import numpy as np
 
 from ngio.ome_zarr_meta.ngio_specs import SpaceUnits, TimeUnits
@@ -20,6 +23,7 @@ def _validate_type(value: float, name: str) -> float:
     return float(value)
 
 
+@total_ordering
 class PixelSize:
     """PixelSize class to store the pixel size in 3D space."""
 
@@ -28,7 +32,7 @@ class PixelSize:
         x: float,
         y: float,
         z: float,
-        t: float = 0,
+        t: float = 1,
         space_unit: SpaceUnits = SpaceUnits.micrometer,
         time_unit: TimeUnits | None = TimeUnits.s,
     ):
@@ -50,10 +54,38 @@ class PixelSize:
         """Return a string representation of the pixel size."""
         return f"PixelSize(x={self.x}, y={self.y}, z={self.z}, t={self.t})"
 
+    def __eq__(self, other) -> bool:
+        """Check if two pixel sizes are equal."""
+        if not isinstance(other, PixelSize):
+            raise TypeError("Can only compare PixelSize with PixelSize.")
+
+        if (
+            self.time_unit is not None
+            and other.time_unit is None
+            and self.time_unit != other.time_unit
+        ):
+            return False
+
+        if self.space_unit != other.space_unit:
+            return False
+        return math.isclose(self.distance(other), 0)
+
+    def __lt__(self, other: "PixelSize") -> bool:
+        """Check if one pixel size is less than the other."""
+        if not isinstance(other, PixelSize):
+            raise TypeError("Can only compare PixelSize with PixelSize.")
+        ref = PixelSize(0, 0, 0, 0, self.space_unit, self.time_unit)
+        return self.distance(ref) < other.distance(ref)
+
     def as_dict(self) -> dict:
         """Return the pixel size as a dictionary."""
         return {"t": self.t, "z": self.z, "y": self.y, "x": self.x}
 
+    @property
+    def tzyx(self) -> tuple[float, float, float, float]:
+        """Return the voxel size in t, z, y, x order."""
+        return self.t, self.z, self.y, self.x
+
     @property
     def zyx(self) -> tuple[float, float, float]:
         """Return the voxel size in z, y, x order."""
@@ -80,5 +112,5 @@ class PixelSize:
         return self.t
 
     def distance(self, other: "PixelSize") -> float:
-        """Return the distance between two pixel sizes in 3D space."""
-        return float(np.linalg.norm(np.array(self.zyx) - np.array(other.zyx)))
+        """Return the distance between two pixel sizes."""
+        return float(np.linalg.norm(np.array(self.tzyx) - np.array(other.tzyx)))
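
With `@total_ordering` plus `__eq__`/`__lt__`, `PixelSize` objects now compare directly, which is what the `mode="lr"`/`"hr"` filters above rely on; note that `distance` now includes the t axis and `t` defaults to 1. A small sketch under the assumption that `PixelSize` is exported from this import path:

    import math

    from ngio.ome_zarr_meta.ngio_specs import PixelSize  # assumed export

    full = PixelSize(x=0.5, y=0.5, z=1.0)
    binned = PixelSize(x=1.0, y=1.0, z=1.0)

    assert binned > full                 # larger pixel size, lower resolution
    assert full != binned                # distance is not close to 0
    assert math.isclose(full.distance(binned), 0.5**0.5)
    print(full.tzyx)                     # voxel size in (t, z, y, x) order
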
ngio/ome_zarr_meta/v04/__init__.py CHANGED
@@ -1,11 +1,23 @@
 """Utility to read/write OME-Zarr metadata v0.4."""
 
-from ngio.ome_zarr_meta.v04._meta_handlers import (
-    V04ImageMetaHandler,
-    V04LabelMetaHandler,
+from ngio.ome_zarr_meta.v04._v04_spec_utils import (
+    ngio_to_v04_image_meta,
+    ngio_to_v04_label_meta,
+    ngio_to_v04_plate_meta,
+    ngio_to_v04_well_meta,
+    v04_to_ngio_image_meta,
+    v04_to_ngio_label_meta,
+    v04_to_ngio_plate_meta,
+    v04_to_ngio_well_meta,
 )
 
 __all__ = [
-    "V04ImageMetaHandler",
-    "V04LabelMetaHandler",
+    "ngio_to_v04_image_meta",
+    "ngio_to_v04_label_meta",
+    "ngio_to_v04_plate_meta",
+    "ngio_to_v04_well_meta",
+    "v04_to_ngio_image_meta",
+    "v04_to_ngio_label_meta",
+    "v04_to_ngio_plate_meta",
+    "v04_to_ngio_well_meta",
 ]
ngio/ome_zarr_meta/v04/_v04_spec_utils.py CHANGED
@@ -15,6 +15,7 @@ from ome_zarr_models.v04.coordinate_transformations import VectorScale as Vector
 from ome_zarr_models.v04.coordinate_transformations import (
     VectorTranslation as VectorTranslationV04,
 )
+from ome_zarr_models.v04.hcs import HCSAttrs as HCSAttrsV04
 from ome_zarr_models.v04.image import ImageAttrs as ImageAttrsV04
 from ome_zarr_models.v04.image_label import ImageLabelAttrs as LabelAttrsV04
 from ome_zarr_models.v04.multiscales import Dataset as DatasetV04
@@ -22,6 +23,7 @@ from ome_zarr_models.v04.multiscales import Multiscale as MultiscaleV04
 from ome_zarr_models.v04.omero import Channel as ChannelV04
 from ome_zarr_models.v04.omero import Omero as OmeroV04
 from ome_zarr_models.v04.omero import Window as WindowV04
+from ome_zarr_models.v04.well import WellAttrs as WellAttrsV04
 from pydantic import ValidationError
 
 from ngio.ome_zarr_meta.ngio_specs import (
@@ -35,6 +37,8 @@ from ngio.ome_zarr_meta.ngio_specs import (
     ImageLabelSource,
     NgioImageMeta,
     NgioLabelMeta,
+    NgioPlateMeta,
+    NgioWellMeta,
     default_channel_name,
 )
 from ngio.ome_zarr_meta.ngio_specs._ngio_image import NgffVersion
@@ -301,10 +305,11 @@ def v04_to_ngio_label_meta(
     )
 
 
-def _ngio_to_v04_multiscale(datasets: list[Dataset]) -> MultiscaleV04:
+def _ngio_to_v04_multiscale(name: str | None, datasets: list[Dataset]) -> MultiscaleV04:
     """Convert a ngio multiscale to a v04 multiscale.
 
     Args:
+        name (str | None): The name of the multiscale.
         datasets (list[Dataset]): The ngio datasets.
 
     Returns:
@@ -340,9 +345,7 @@ def _ngio_to_v04_multiscale(datasets: list[Dataset]) -> MultiscaleV04:
             DatasetV04(path=dataset.path, coordinateTransformations=transform)
         )
     return MultiscaleV04(
-        axes=v04_axes,
-        datasets=tuple(v04_datasets),
-        version="0.4",
+        axes=v04_axes, datasets=tuple(v04_datasets), version="0.4", name=name
     )
 
 
@@ -387,11 +390,13 @@ def ngio_to_v04_image_meta(metadata: NgioImageMeta) -> dict:
     Returns:
         dict: The v04 image metadata.
     """
-    v04_muliscale = _ngio_to_v04_multiscale(metadata.datasets)
+    v04_muliscale = _ngio_to_v04_multiscale(
+        name=metadata.name, datasets=metadata.datasets
+    )
     v04_omero = _ngio_to_v04_omero(metadata._channels_meta)
 
     v04_image = ImageAttrsV04(multiscales=[v04_muliscale], omero=v04_omero)
-    return v04_image.model_dump(exclude_none=True)
+    return v04_image.model_dump(exclude_none=True, by_alias=True)
 
 
 def ngio_to_v04_label_meta(metadata: NgioLabelMeta) -> dict:
@@ -403,10 +408,78 @@ def ngio_to_v04_label_meta(metadata: NgioLabelMeta) -> dict:
     Returns:
         dict: The v04 image metadata.
     """
-    v04_muliscale = _ngio_to_v04_multiscale(metadata.datasets)
-    v04_label = LabelAttrsV04(
-        multiscales=[v04_muliscale],
-        # image_label is aliased as 'imae-label'
-        image_label=metadata.image_label.model_dump(),  # type: ignore
+    v04_muliscale = _ngio_to_v04_multiscale(
+        name=metadata.name, datasets=metadata.datasets
     )
-    return v04_label.model_dump(exclude_none=True)
+    labels_meta = {
+        "multiscales": [v04_muliscale],
+        "image-label": metadata.image_label.model_dump(),
+    }
+    v04_label = LabelAttrsV04(**labels_meta)
+    return v04_label.model_dump(exclude_none=True, by_alias=True)
+
+
+def v04_to_ngio_well_meta(
+    metadata: dict,
+) -> tuple[bool, NgioWellMeta | ValidationError]:
+    """Convert a v04 well metadata to a ngio well metadata.
+
+    Args:
+        metadata (dict): The v04 well metadata.
+
+    Returns:
+        result (bool): True if the conversion was successful, False otherwise.
+        ngio_well_meta (NgioWellMeta): The ngio well metadata.
+    """
+    try:
+        v04_well = WellAttrsV04(**metadata)
+    except ValidationError as e:
+        return False, e
+
+    return True, NgioWellMeta(**v04_well.model_dump())
+
+
+def v04_to_ngio_plate_meta(
+    metadata: dict,
+) -> tuple[bool, NgioPlateMeta | ValidationError]:
+    """Convert a v04 plate metadata to a ngio plate metadata.
+
+    Args:
+        metadata (dict): The v04 plate metadata.
+
+    Returns:
+        result (bool): True if the conversion was successful, False otherwise.
+        ngio_plate_meta (NgioPlateMeta): The ngio plate metadata.
+    """
+    try:
+        v04_plate = HCSAttrsV04(**metadata)
+    except ValidationError as e:
+        return False, e
+
+    return True, NgioPlateMeta(**v04_plate.model_dump())
+
+
+def ngio_to_v04_well_meta(metadata: NgioWellMeta) -> dict:
+    """Convert a ngio well metadata to a v04 well metadata.
+
+    Args:
+        metadata (NgioWellMeta): The ngio well metadata.
+
+    Returns:
+        dict: The v04 well metadata.
+    """
+    v04_well = WellAttrsV04(**metadata.model_dump())
+    return v04_well.model_dump(exclude_none=True, by_alias=True)
+
+
+def ngio_to_v04_plate_meta(metadata: NgioPlateMeta) -> dict:
+    """Convert a ngio plate metadata to a v04 plate metadata.
+
+    Args:
+        metadata (NgioPlateMeta): The ngio plate metadata.
+
+    Returns:
+        dict: The v04 plate metadata.
+    """
+    v04_plate = HCSAttrsV04(**metadata.model_dump())
+    return v04_plate.model_dump(exclude_none=True, by_alias=True)
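
Metadata conversion is now purely functional: the `V04ImageMetaHandler`/`V04LabelMetaHandler` classes are gone, and the new well/plate converters return a `(success, result)` pair instead of raising on invalid input. A hedged sketch of the plate round trip; the `plate_attrs` dict is illustrative and must already follow the v0.4 HCS layout:

    from ngio.ome_zarr_meta.v04 import ngio_to_v04_plate_meta, v04_to_ngio_plate_meta

    ok, result = v04_to_ngio_plate_meta(plate_attrs)  # plate_attrs: dict read from .zattrs
    if not ok:
        # result is the pydantic ValidationError describing the failure
        raise RuntimeError(f"Not a valid v0.4 plate: {result}")

    # result is an NgioPlateMeta; dumping back uses field aliases and drops
    # None values (exclude_none=True, by_alias=True), as the diff shows.
    v04_attrs = ngio_to_v04_plate_meta(result)
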
ngio/tables/__init__.py CHANGED
@@ -3,6 +3,7 @@
 from ngio.tables.backends import ImplementedTableBackends
 from ngio.tables.tables_container import (
     FeatureTable,
+    GenericRoiTable,
     MaskingROITable,
     RoiTable,
     Table,
@@ -15,6 +16,7 @@ from ngio.tables.v1._generic_table import GenericTable
 
 __all__ = [
     "FeatureTable",
+    "GenericRoiTable",
     "GenericTable",
     "ImplementedTableBackends",
     "MaskingROITable",
ngio/tables/_validators.py CHANGED
@@ -4,9 +4,7 @@ from typing import Protocol
 import pandas as pd
 import pandas.api.types as ptypes
 
-from ngio.utils import (
-    NgioTableValidationError,
-)
+from ngio.utils import NgioTableValidationError, NgioValueError
 
 
 class TableValidator(Protocol):
@@ -134,7 +132,7 @@ def validate_index_dtype(dataframe: pd.DataFrame, index_type: str) -> pd.DataFra
                 f"Table index must be of integer type, got {dataframe.index.dtype}"
             )
         case _:
-            raise ValueError(f"index_type {index_type} not recognized")
+            raise NgioValueError(f"index_type {index_type} not recognized")
 
     return dataframe
 
ngio/tables/backends/_anndata_utils.py CHANGED
@@ -17,6 +17,7 @@ from anndata.experimental import read_dispatched
 from ngio.tables._validators import validate_index_dtype, validate_index_key
 from ngio.utils import (
     NgioTableValidationError,
+    NgioValueError,
     StoreOrGroup,
     open_group_wrapper,
 )
@@ -90,7 +91,7 @@ def custom_read_zarr(
     _clean_uns(adata)
 
     if not isinstance(adata, AnnData):
-        raise ValueError(f"Expected an AnnData object, but got {type(adata)}")
+        raise NgioValueError(f"Expected an AnnData object, but got {type(adata)}")
     return adata
 
 
ngio/tables/backends/_anndata_v1.py CHANGED
@@ -10,6 +10,7 @@ from ngio.tables.backends._anndata_utils import (
     custom_read_zarr,
     dataframe_to_anndata,
 )
+from ngio.utils import NgioValueError
 
 
 class AnnDataBackend(AbstractTableBackend):
@@ -65,7 +66,7 @@ class AnnDataBackend(AbstractTableBackend):
         """Consolidate the metadata in the store."""
         store = self._group_handler.store
         if not isinstance(store, str | Path):
-            raise ValueError(
+            raise NgioValueError(
                 "To write an AnnData object the store must be a local path/str."
             )
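
Across the tables modules, plain `ValueError`s become `NgioValueError` imported from `ngio.utils`. Downstream code that caught `ValueError` may need updating; a minimal sketch, where the `backend.consolidate()` call is hypothetical and whether `NgioValueError` still subclasses `ValueError` is not shown in this diff:

    from ngio.utils import NgioValueError

    try:
        backend.consolidate()  # hypothetical call into the AnnData backend
    except NgioValueError as err:
        # 0.2.0a3 raised ValueError here; 0.2.0b1 raises NgioValueError.
        print(f"ngio rejected the operation: {err}")
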