ngio 0.1.6__py3-none-any.whl → 0.2.0a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84) hide show
  1. ngio/__init__.py +31 -5
  2. ngio/common/__init__.py +44 -0
  3. ngio/common/_array_pipe.py +160 -0
  4. ngio/common/_axes_transforms.py +63 -0
  5. ngio/common/_common_types.py +5 -0
  6. ngio/common/_dimensions.py +113 -0
  7. ngio/common/_pyramid.py +223 -0
  8. ngio/{core/roi.py → common/_roi.py} +22 -23
  9. ngio/common/_slicer.py +97 -0
  10. ngio/{pipes/_zoom_utils.py → common/_zoom.py} +2 -78
  11. ngio/hcs/__init__.py +60 -0
  12. ngio/images/__init__.py +23 -0
  13. ngio/images/abstract_image.py +240 -0
  14. ngio/images/create.py +251 -0
  15. ngio/images/image.py +389 -0
  16. ngio/images/label.py +236 -0
  17. ngio/images/omezarr_container.py +535 -0
  18. ngio/ome_zarr_meta/__init__.py +35 -0
  19. ngio/ome_zarr_meta/_generic_handlers.py +320 -0
  20. ngio/ome_zarr_meta/_meta_handlers.py +142 -0
  21. ngio/ome_zarr_meta/ngio_specs/__init__.py +63 -0
  22. ngio/ome_zarr_meta/ngio_specs/_axes.py +481 -0
  23. ngio/ome_zarr_meta/ngio_specs/_channels.py +378 -0
  24. ngio/ome_zarr_meta/ngio_specs/_dataset.py +134 -0
  25. ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py +5 -0
  26. ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +434 -0
  27. ngio/ome_zarr_meta/ngio_specs/_pixel_size.py +84 -0
  28. ngio/ome_zarr_meta/v04/__init__.py +11 -0
  29. ngio/ome_zarr_meta/v04/_meta_handlers.py +54 -0
  30. ngio/ome_zarr_meta/v04/_v04_spec_utils.py +412 -0
  31. ngio/tables/__init__.py +21 -5
  32. ngio/tables/_validators.py +192 -0
  33. ngio/tables/backends/__init__.py +8 -0
  34. ngio/tables/backends/_abstract_backend.py +71 -0
  35. ngio/tables/backends/_anndata_utils.py +194 -0
  36. ngio/tables/backends/_anndata_v1.py +75 -0
  37. ngio/tables/backends/_json_v1.py +56 -0
  38. ngio/tables/backends/_table_backends.py +102 -0
  39. ngio/tables/tables_container.py +300 -0
  40. ngio/tables/v1/__init__.py +6 -5
  41. ngio/tables/v1/_feature_table.py +161 -0
  42. ngio/tables/v1/_generic_table.py +99 -182
  43. ngio/tables/v1/_masking_roi_table.py +175 -0
  44. ngio/tables/v1/_roi_table.py +226 -0
  45. ngio/utils/__init__.py +23 -10
  46. ngio/utils/_datasets.py +51 -0
  47. ngio/utils/_errors.py +10 -4
  48. ngio/utils/_zarr_utils.py +378 -0
  49. {ngio-0.1.6.dist-info → ngio-0.2.0a2.dist-info}/METADATA +18 -39
  50. ngio-0.2.0a2.dist-info/RECORD +53 -0
  51. ngio/core/__init__.py +0 -7
  52. ngio/core/dimensions.py +0 -122
  53. ngio/core/image_handler.py +0 -228
  54. ngio/core/image_like_handler.py +0 -549
  55. ngio/core/label_handler.py +0 -410
  56. ngio/core/ngff_image.py +0 -387
  57. ngio/core/utils.py +0 -287
  58. ngio/io/__init__.py +0 -19
  59. ngio/io/_zarr.py +0 -88
  60. ngio/io/_zarr_array_utils.py +0 -0
  61. ngio/io/_zarr_group_utils.py +0 -60
  62. ngio/iterators/__init__.py +0 -1
  63. ngio/ngff_meta/__init__.py +0 -27
  64. ngio/ngff_meta/fractal_image_meta.py +0 -1267
  65. ngio/ngff_meta/meta_handler.py +0 -92
  66. ngio/ngff_meta/utils.py +0 -235
  67. ngio/ngff_meta/v04/__init__.py +0 -6
  68. ngio/ngff_meta/v04/specs.py +0 -158
  69. ngio/ngff_meta/v04/zarr_utils.py +0 -376
  70. ngio/pipes/__init__.py +0 -7
  71. ngio/pipes/_slicer_transforms.py +0 -176
  72. ngio/pipes/_transforms.py +0 -33
  73. ngio/pipes/data_pipe.py +0 -52
  74. ngio/tables/_ad_reader.py +0 -80
  75. ngio/tables/_utils.py +0 -301
  76. ngio/tables/tables_group.py +0 -252
  77. ngio/tables/v1/feature_tables.py +0 -182
  78. ngio/tables/v1/masking_roi_tables.py +0 -243
  79. ngio/tables/v1/roi_tables.py +0 -285
  80. ngio/utils/_common_types.py +0 -5
  81. ngio/utils/_pydantic_utils.py +0 -52
  82. ngio-0.1.6.dist-info/RECORD +0 -44
  83. {ngio-0.1.6.dist-info → ngio-0.2.0a2.dist-info}/WHEEL +0 -0
  84. {ngio-0.1.6.dist-info → ngio-0.2.0a2.dist-info}/licenses/LICENSE +0 -0
@@ -1,1267 +0,0 @@
1
- """Image metadata models.
2
-
3
- This module contains the models for the image metadata.
4
- These metadata models are not adhering to the OME standard.
5
- But they can be built from the OME standard metadata, and they
6
- can be converted to the OME standard.
7
- """
8
-
9
- from collections.abc import Collection
10
- from difflib import SequenceMatcher
11
- from enum import Enum
12
- from typing import Any, TypeVar
13
-
14
- import numpy as np
15
- from pydantic import BaseModel, Field, field_validator
16
- from typing_extensions import Self
17
-
18
- from ngio.utils._pydantic_utils import BaseWithExtraFields
19
-
20
- T = TypeVar("T")
21
-
22
-
23
class NgffVersion(str, Enum):
    """Supported NGFF specification versions."""

    v04 = "0.4"
27
-
28
-
29
- ################################################################################################
30
- #
31
- # Omero Section of the Metadata is used to store channel information and visualisation
32
- # settings.
33
- # This section is transitory and will be likely changed in the future.
34
- #
35
- #################################################################################################
36
-
37
-
38
- class NgioColors(str, Enum):
39
- """Default colors for the channels."""
40
-
41
- dapi = "0000FF"
42
- hoechst = "0000FF"
43
- gfp = "00FF00"
44
- cy3 = "FFFF00"
45
- cy5 = "FF0000"
46
- brightfield = "808080"
47
- red = "FF0000"
48
- yellow = "FFFF00"
49
- magenta = "FF00FF"
50
- cyan = "00FFFF"
51
- gray = "808080"
52
- green = "00FF00"
53
-
54
- @staticmethod
55
- def semi_random_pick(channel_name: str | None = None) -> "NgioColors":
56
- """Try to fuzzy match the color to the channel name.
57
-
58
- - If a channel name is given will try to match the channel name to the color.
59
- - If name has the paatern 'channel_x' cyclic rotate over a list of colors
60
- [cyan, magenta, yellow, green]
61
- - If no channel name is given will return a random color.
62
- """
63
- available_colors = NgioColors._member_names_
64
-
65
- if channel_name is None:
66
- # Purely random color
67
- color_str = available_colors[np.random.randint(0, len(available_colors))]
68
- return NgioColors.__members__[color_str]
69
-
70
- if channel_name.startswith("channel_"):
71
- # Rotate over a list of colors
72
- defaults_colors = [
73
- NgioColors.cyan,
74
- NgioColors.magenta,
75
- NgioColors.yellow,
76
- NgioColors.green,
77
- ]
78
-
79
- try:
80
- index = int(channel_name.split("_")[-1]) % len(defaults_colors)
81
- return defaults_colors[index]
82
- except ValueError:
83
- # If the name of the channel is something like
84
- # channel_dapi this will fail an proceed to the
85
- # standard fuzzy match
86
- pass
87
-
88
- similarity = {}
89
- for color in available_colors:
90
- # try to match the color to the channel name
91
- similarity[color] = SequenceMatcher(None, channel_name, color).ratio()
92
- color_str = max(similarity, key=similarity.get)
93
- return NgioColors.__members__[color_str]
94
-
95
-
96
def valid_hex_color(v: str) -> bool:
    """Check whether *v* is a valid six-character hexadecimal color.

    A valid color consists of exactly six characters, each a letter
    (a-f or A-F) or a digit (0-9).

    Implementation source:
    https://github.com/fractal-analytics-platform/fractal-tasks-core/fractal_tasks_core/channels.py#L87
    Original authors:
    - Tommaso Comparin <tommaso.comparin@exact-lab.it>
    """
    hex_digits = set("abcdefABCDEF0123456789")
    return len(v) == 6 and all(character in hex_digits for character in v)
115
-
116
-
117
class ChannelVisualisation(BaseWithExtraFields):
    """Channel visualisation model.

    Contains the information about the visualisation of a channel.

    Attributes:
        color(str): The color of the channel in hexadecimal format or a color name.
        min(int | float): The minimum value of the channel.
        max(int | float): The maximum value of the channel.
        start(int | float): The start value of the channel.
        end(int | float): The end value of the channel.
        active(bool): Whether the channel is active.
    """

    # validate_default=True so the validator runs on the default None and
    # replaces it with a semi-random color.
    color: str | NgioColors | None = Field(default=None, validate_default=True)
    min: int | float = 0
    max: int | float = 65535
    start: int | float = 0
    end: int | float = 65535
    active: bool = True

    @field_validator("color", mode="after")
    @classmethod
    def validate_color(cls, value: str | NgioColors | None) -> str:
        """Normalize the color field to a hexadecimal string.

        Accepted inputs:
            - None (the field default): a semi-random color is picked.
            - A hexadecimal string: returned unchanged.
            - A color name: fuzzy matched to a NgioColors member.
            - A NgioColors member: its hexadecimal value is returned.

        Note: the annotation was fixed to include None, which the validator
        explicitly handles (the field default is None with validate_default).
        """
        if value is None:
            return NgioColors.semi_random_pick().value
        if isinstance(value, str) and valid_hex_color(value):
            return value
        elif isinstance(value, str):
            value_lower = value.lower()
            return NgioColors.semi_random_pick(value_lower).value
        elif isinstance(value, NgioColors):
            return value.value
        else:
            raise ValueError("Invalid color value.")

    @classmethod
    def lazy_init(
        cls,
        color: str | NgioColors | None = None,
        start: int | float | None = None,
        end: int | float | None = None,
        active: bool = True,
        data_type: Any = np.uint16,
    ) -> "ChannelVisualisation":
        """Create a ChannelVisualisation object with the default unit.

        Args:
            color(str): The color of the channel in hexadecimal format or a color name.
            start(int | float | None): The start value of the channel.
                Defaults to the minimum value of the data type.
            end(int | float | None): The end value of the channel.
                Defaults to the maximum value of the data type.
            data_type(Any): The data type of the channel.
            active(bool): Whether the channel should be shown by default.

        Raises:
            ValueError: If data_type is neither an integer nor a float type.
        """
        # Probe the data type: np.iinfo handles integer types, np.finfo
        # handles float types; the for-else raises if neither applies.
        for func in [np.iinfo, np.finfo]:
            try:
                min_value = func(data_type).min
                max_value = func(data_type).max
                break
            except ValueError:
                continue
        else:
            raise ValueError(f"Invalid data type {data_type}.")

        start = start if start is not None else min_value
        end = end if end is not None else max_value
        return ChannelVisualisation(
            color=color,
            min=min_value,
            max=max_value,
            start=start,
            end=end,
            active=active,
        )
198
-
199
-
200
class Channel(BaseModel):
    """Information about a channel in the image.

    Attributes:
        label(str): The label of the channel.
        wavelength_id(str | None): The wavelength ID of the channel.
        channel_visualisation(ChannelVisualisation): The visualisation
            settings (color, contrast window, active flag) of the channel.
    """

    label: str
    wavelength_id: str | None = None
    channel_visualisation: ChannelVisualisation

    @classmethod
    def lazy_init(
        cls,
        label: str,
        wavelength_id: str | None = None,
        color: str | NgioColors | None = None,
        start: int | float | None = None,
        end: int | float | None = None,
        active: bool = True,
        data_type: Any = np.uint16,
    ) -> "Channel":
        """Create a Channel object with the default unit.

        Args:
            label(str): The label of the channel.
            wavelength_id(str | None): The wavelength ID of the channel.
                If None, the label is used as the wavelength ID.
            color(str): The color of the channel in hexadecimal format or a color name.
                If None, the color will be picked based on the label.
            start(int | float | None): The start value of the channel.
            end(int | float | None): The end value of the channel.
            active(bool): Whether the channel should be shown by default.
            data_type(Any): The data type of the channel.
        """
        if color is None:
            # If no color is provided, try to pick a color based on the label.
            # See the NgioColors.semi_random_pick method for more details.
            color = label

        channel_visualization = ChannelVisualisation.lazy_init(
            color=color, start=start, end=end, active=active, data_type=data_type
        )

        if wavelength_id is None:
            # TODO Evaluate if a better default value can be used
            wavelength_id = label

        return cls(
            label=label,
            wavelength_id=wavelength_id,
            channel_visualisation=channel_visualization,
        )
255
-
256
-
257
- def _check_elements(elements: Collection[T], expected_type: Any) -> Collection[T]:
258
- """Check that the elements are of the same type."""
259
- if len(elements) == 0:
260
- raise ValueError("At least one element must be provided.")
261
-
262
- for element in elements:
263
- if not isinstance(element, expected_type):
264
- raise ValueError(
265
- f"All elements must be of the same type {expected_type}. Got {element}."
266
- )
267
-
268
- return elements
269
-
270
-
271
- def _check_unique(elements: Collection[T]) -> Collection[T]:
272
- """Check that the elements are unique."""
273
- if len(set(elements)) != len(elements):
274
- raise ValueError("All elements must be unique.")
275
- return elements
276
-
277
-
278
class Omero(BaseWithExtraFields):
    """Information about the OMERO metadata.

    Attributes:
        channels(list[Channel]): The list of channels in the image.
        extra_fields(dict): To reduce the api surface, any extra fields found
            in the omero attributes are stored in the extra_fields attribute.
    """

    channels: list[Channel] = Field(default_factory=list)

    @classmethod
    def lazy_init(
        cls,
        labels: Collection[str] | int,
        wavelength_id: Collection[str] | None = None,
        colors: Collection[str | NgioColors] | None = None,
        start: Collection[int | float] | int | float | None = None,
        end: Collection[int | float] | int | float | None = None,
        active: Collection[bool] | None = None,
        data_type: Any = np.uint16,
        **omero_kwargs: dict,
    ) -> "Omero":
        """Create an Omero object with the default unit.

        Args:
            labels(Collection[str] | int): The list of channel names in the image.
                If an integer is provided, the channels will be named "channel_i".
            wavelength_id(Collection[str] | None): The wavelength IDs of the
                channels. If None, each wavelength ID defaults to the channel name.
            colors(Collection[str | NgioColors] | None): The list of colors for
                the channels. If None, the colors will be picked semi-randomly.
            start(Collection[int | float] | int | float | None): The start value
                of each channel. If None, the start value will be the minimum
                value of the data type.
            end(Collection[int | float] | int | float | None): The end value of
                each channel. If None, the end value will be the maximum value
                of the data type.
            active(Collection[bool] | None): Whether each channel should be
                shown by default.
            data_type(Any): The data type of the channels. Will be used to set
                the min and max values of each channel.
            omero_kwargs(dict): Extra fields to store in the omero attributes.

        Raises:
            ValueError: If the labels are empty, not unique, or any per-channel
                collection has the wrong element type or length.
        """
        if isinstance(labels, int):
            labels = [f"channel_{i}" for i in range(labels)]

        labels = _check_elements(labels, str)
        labels = _check_unique(labels)

        # Each optional per-channel argument defaults to a list of per-channel
        # placeholders; an explicit collection replaces it after validation.
        _wavelength_id: Collection[str | None] = [None] * len(labels)
        if isinstance(wavelength_id, Collection):
            _wavelength_id = _check_elements(wavelength_id, str)
            # Bug fix: validate the already-checked collection instead of
            # re-reading the raw argument (equivalent value, consistent flow).
            _wavelength_id = _check_unique(_wavelength_id)

        _colors: Collection[str | NgioColors] = ["random"] * len(labels)
        if isinstance(colors, Collection):
            _colors = _check_elements(colors, str | NgioColors)

        _start: Collection[int | float | None] = [None] * len(labels)
        if isinstance(start, Collection):
            _start = _check_elements(start, (int, float))

        _end: Collection[int | float | None] = [None] * len(labels)
        if isinstance(end, Collection):
            _end = _check_elements(end, (int, float))

        _active: Collection[bool] = [True] * len(labels)
        if isinstance(active, Collection):
            _active = _check_elements(active, bool)

        # strict=True guarantees every per-channel collection matches the
        # number of labels.
        omero_channels = []
        for ch_name, w_id, color, s, e, a in zip(
            labels, _wavelength_id, _colors, _start, _end, _active, strict=True
        ):
            omero_channels.append(
                Channel.lazy_init(
                    label=ch_name,
                    wavelength_id=w_id,
                    color=color,
                    start=s,
                    end=e,
                    active=a,
                    data_type=data_type,
                )
            )
        return cls(channels=omero_channels, **omero_kwargs)
365
-
366
-
367
- ################################################################################################
368
- #
369
- # Axis Types and Units
370
- # We define a small set of axis types and units that can be used in the metadata.
371
- # These axis types are more restrictive than the OME standard.
372
- # We do that to simplify the data processing.
373
- #
374
- #################################################################################################
375
-
376
-
377
class AxisType(str, Enum):
    """The kinds of axis supported by ngio."""

    channel = "channel"
    time = "time"
    space = "space"
383
-
384
-
385
class SpaceUnits(str, Enum):
    """Allowed space units."""

    nanometer = "nanometer"
    nm = "nm"
    micrometer = "micrometer"
    um = "um"
    millimeter = "millimeter"
    mm = "mm"
    centimeter = "centimeter"
    cm = "cm"

    @classmethod
    def allowed_names(cls) -> list[str]:
        """Return the allowed space unit names.

        Bug fix: the first parameter of a classmethod is the class and is
        conventionally named ``cls`` (it was misleadingly named ``self``);
        the docstring also wrongly said "axis names".
        """
        return list(cls.__members__.keys())
401
-
402
-
403
class SpaceNames(str, Enum):
    """Allowed space axis names."""

    z = "z"
    y = "y"
    x = "x"

    @classmethod
    def allowed_names(cls) -> list[str]:
        """Return the allowed space axis names.

        Bug fix: the first parameter of a classmethod is the class and is
        conventionally named ``cls`` (it was misleadingly named ``self``).
        """
        return list(cls.__members__.keys())
414
-
415
-
416
class ChannelNames(str, Enum):
    """Allowed channel axis names."""

    c = "c"

    @classmethod
    def allowed_names(cls) -> list[str]:
        """Return the allowed channel axis names.

        Bug fix: the first parameter of a classmethod is the class and is
        conventionally named ``cls`` (it was misleadingly named ``self``).
        """
        return list(cls.__members__.keys())
425
-
426
-
427
class TimeUnits(str, Enum):
    """Allowed time units."""

    seconds = "seconds"
    s = "s"

    @classmethod
    def allowed_names(cls) -> list[str]:
        """Return the allowed time unit names.

        Bug fix: the first parameter of a classmethod is the class and is
        conventionally named ``cls`` (it was misleadingly named ``self``);
        the docstring also wrongly said "axis names".
        """
        return list(cls.__members__.keys())
437
-
438
-
439
class TimeNames(str, Enum):
    """Allowed time axis names."""

    t = "t"

    @classmethod
    def allowed_names(cls) -> list[str]:
        """Return the allowed time axis names.

        Bug fix: the first parameter of a classmethod is the class and is
        conventionally named ``cls`` (it was misleadingly named ``self``).
        """
        return list(cls.__members__.keys())
448
-
449
-
450
- ################################################################################################
451
- #
452
- # PixelSize model
453
- # The PixelSize model is used to store the pixel size in 3D space.
454
- # The model does not store scaling factors and units for other axes.
455
- #
456
- #################################################################################################
457
-
458
-
459
class PixelSize(BaseModel):
    """PixelSize class to store the pixel size in 3D space."""

    x: float = Field(..., ge=0)
    y: float = Field(..., ge=0)
    z: float = Field(1.0, ge=0)
    unit: SpaceUnits = SpaceUnits.micrometer
    virtual: bool = False

    def __str__(self) -> str:
        """Return the string representation of the object."""
        return f"PixelSize(x={self.x}, y={self.y}, z={self.z}, unit={self.unit.value})"

    @classmethod
    def from_list(
        cls, sizes: list[float], unit: SpaceUnits = SpaceUnits.micrometer
    ) -> "PixelSize":
        """Build a PixelSize object from a list of sizes.

        Order of the sizes:
            - for 2d: [y, x]
            - for 3d: [z, y, x]

        Note: The order of the sizes must be z, y, x.

        Args:
            sizes(list[float]): The list of sizes.
            unit(SpaceUnits): The unit of the sizes.
        """
        # A 2D size gets a default z spacing of 1 prepended.
        if len(sizes) == 2:
            sizes = [1, *sizes]
        if len(sizes) != 3:
            raise ValueError("Invalid pixel size list. Must have 2 or 3 elements.")
        size_z, size_y, size_x = sizes
        return cls(z=size_z, y=size_y, x=size_x, unit=unit)

    def as_dict(self) -> dict:
        """Return the pixel size as a dictionary keyed by axis name."""
        return dict(zip("zyx", self.zyx, strict=True))

    @property
    def zyx(self) -> tuple[float, float, float]:
        """Return the voxel size in z, y, x order."""
        return (self.z, self.y, self.x)

    @property
    def yx(self) -> tuple[float, float]:
        """Return the xy plane pixel size in y, x order."""
        return (self.y, self.x)

    @property
    def voxel_volume(self) -> float:
        """Return the volume of a voxel."""
        return self.x * self.y * self.z

    @property
    def xy_plane_area(self) -> float:
        """Return the area of the xy plane."""
        return self.x * self.y

    def distance(self, other: "PixelSize") -> float:
        """Return the Euclidean distance between two pixel sizes."""
        difference = np.array(self.zyx) - np.array(other.zyx)
        return float(np.linalg.norm(difference))
522
-
523
-
524
- ################################################################################################
525
- #
526
- # Axis and Dataset models are the two core components of the OME-NFF
527
- # multiscale metadata.
528
- # The Axis model is used to store the information about an axis (name, unit, type).
529
- # The Dataset model is used to store the information about a
530
- # dataset (path, axes, scale).
531
- #
532
- # The Dataset and Axis have two representations:
533
- # - on_disk: The representation of the metadata as stored on disk. This representation
534
- # preserves the order of the axes and the scale transformation.
535
- # - canonical: The representation of the metadata in the canonical order.
536
- # This representation is used to simplify the data processing.
537
- #
538
- #################################################################################################
539
-
540
-
541
class Axis:
    """Axis infos model.

    Holds the name, unit, and inferred type (time/space/channel) of a
    single multiscale axis.
    """

    def __init__(
        self,
        name: str | TimeNames | SpaceNames,
        unit: SpaceUnits | TimeUnits | None = None,
    ) -> None:
        """Initialize the Axis object.

        Args:
            name(str): The name of the axis.
            unit(SpaceUnits | TimeUnits | None): The unit of the axis.

        Raises:
            ValueError: If the name is None or unknown, or if the unit is
                invalid for the axis type.
        """
        if name is None:
            raise ValueError("Axis name cannot be None.")

        # Normalize enum members to their plain string value.
        if isinstance(name, Enum):
            name = name.value
        self._name = name

        if name in TimeNames.allowed_names():
            self._type = AxisType.time
            if unit is None:
                unit = TimeUnits.s
            elif unit not in TimeUnits.allowed_names():
                raise ValueError(f"Invalid time unit {unit}.")
            self._unit = unit
        elif name in SpaceNames.allowed_names():
            self._type = AxisType.space
            if unit is None:
                unit = SpaceUnits.um
            elif unit not in SpaceUnits.allowed_names():
                raise ValueError(f"Invalid space unit {unit}.")
            self._unit = unit
        elif name in ChannelNames.allowed_names():
            if unit is not None:
                raise ValueError("Channel axis cannot have a unit.")
            self._type = AxisType.channel
            self._unit = None
        else:
            raise ValueError(f"Invalid axis name {name}.")

    @classmethod
    def lazy_create(
        cls,
        name: str | TimeNames | SpaceNames,
        time_unit: TimeUnits | None = None,
        space_unit: SpaceUnits | None = None,
    ) -> "Axis":
        """Create an Axis object, choosing the unit that fits the axis type."""
        if name in TimeNames.allowed_names():
            chosen_unit: TimeUnits | SpaceUnits | None = time_unit
        elif name in SpaceNames.allowed_names():
            chosen_unit = space_unit
        else:
            chosen_unit = None
        return cls(name=name, unit=chosen_unit)

    @classmethod
    def batch_create(
        cls,
        axes_names: Collection[str | SpaceNames | TimeNames],
        time_unit: TimeUnits | None = None,
        space_unit: SpaceUnits | None = None,
    ) -> list["Axis"]:
        """Create a list of Axis objects from a list of axis names."""
        return [
            cls.lazy_create(name=axis_name, time_unit=time_unit, space_unit=space_unit)
            for axis_name in axes_names
        ]

    @property
    def name(self) -> str:
        """Get the name of the axis."""
        return self._name

    @property
    def unit(self) -> SpaceUnits | TimeUnits | None:
        """Get the unit of the axis."""
        return self._unit

    @unit.setter
    def unit(self, unit: SpaceUnits | TimeUnits | None) -> None:
        """Set the unit of the axis."""
        self._unit = unit

    @property
    def type(self) -> AxisType:
        """Get the type of the axis."""
        return self._type

    def model_dump(self) -> dict:
        """Return the axis information in a dictionary, dropping None values."""
        raw = {"name": self.name, "unit": self.unit, "type": self.type}
        return {key: value for key, value in raw.items() if value is not None}
647
-
648
-
649
class Dataset:
    """Model for a dataset in the multiscale.

    To initialize the Dataset object, the path, the axes, scale, and translation list
    can be provided with on_disk order.

    The Dataset object will reorder the scale and translation lists according to the
    following canonical order of the axes:
        * Time axis (if present)
        * Channel axis (if present)
        * Z axis (if present)
        * Y axis (Mandatory)
        * X axis (Mandatory)
    """

    def __init__(
        self,
        *,
        path: str,
        on_disk_axes: list[Axis],
        on_disk_scale: list[float],
        on_disk_translation: list[float] | None = None,
        canonical_order: list[str] | None = None,
    ):
        """Initialize the Dataset object.

        Args:
            path(str): The path of the dataset.
            on_disk_axes(list[Axis]): The list of axes in the multiscale.
            on_disk_scale(list[float]): The list of scale transformation.
                The scale transformation must have the same length as the axes.
            on_disk_translation(list[float] | None): The list of translation.
            canonical_order(list[str] | None): The canonical order of the axes.
                If None, the default order is ["t", "c", "z", "y", "x"].

        Raises:
            ValueError: If an axis is missing from the canonical order, axis
                names or canonical entries are duplicated, or the scale or
                translation length does not match the number of axes.
        """
        self._path = path

        # Canonical order validation
        if canonical_order is None:
            self._canonical_order = ["t", "c", "z", "y", "x"]
        else:
            self._canonical_order = canonical_order

        for ax in on_disk_axes:
            if ax.name not in self._canonical_order:
                raise ValueError(f"Axis {ax.name} not found in the canonical order.")

        if len(set(self._canonical_order)) != len(self._canonical_order):
            raise ValueError("Canonical order must have unique elements.")

        # Bug fix: compare axis *names*, not the Axis objects themselves.
        # Axis does not define __eq__/__hash__, so a set of Axis objects
        # deduplicates by identity and duplicate axis names were never caught.
        on_disk_names = [ax.name for ax in on_disk_axes]
        if len(set(on_disk_names)) != len(on_disk_names):
            raise ValueError("on_disk axes must have unique elements.")

        self._on_disk_axes = on_disk_axes

        # Scale transformation validation
        if len(on_disk_scale) != len(on_disk_axes):
            raise ValueError(
                "Inconsistent scale transformation. "
                "The scale transformation must have the same length."
            )
        self._scale = on_disk_scale

        # Translation transformation validation
        if on_disk_translation is not None and len(on_disk_translation) != len(
            on_disk_axes
        ):
            raise ValueError(
                "Inconsistent translation transformation. "
                "The translation transformation must have the same length."
            )

        self._translation = on_disk_translation

        # Map each canonical axis name to its on-disk index; axes absent
        # from disk are skipped, so the mapping values are never None.
        _map = {ax.name: i for i, ax in enumerate(on_disk_axes)}

        self._index_mapping = {}
        for name in self._canonical_order:
            _index = _map.get(name, None)
            if _index is not None:
                self._index_mapping[name] = _index

        self._ordered_axes = [on_disk_axes[i] for i in self._index_mapping.values()]

    @property
    def path(self) -> str:
        """Get the path of the dataset."""
        return self._path

    @property
    def index_mapping(self) -> dict[str, int]:
        """Get the mapping between the canonical order and the actual order."""
        return self._index_mapping

    @property
    def axes(self) -> list[Axis]:
        """Get the axes in the canonical order."""
        return self._ordered_axes

    @property
    def on_disk_axes_names(self) -> list[str]:
        """Get the axes in the on-disk order."""
        return [ax.name for ax in self._on_disk_axes]

    @property
    def axes_order(self) -> list[int]:
        """Get the mapping between the canonical order and the on-disk order.

        Example:
            on_disk_order = ["z", "c", "y", "x"]
            canonical_order = ["c", "z", "y", "x"]
            axes_order = [1, 0, 2, 3]
        """
        on_disk_axes = self.on_disk_axes_names
        canonical_axes = self.axes_names
        return [on_disk_axes.index(ax) for ax in canonical_axes]

    @property
    def reverse_axes_order(self) -> list[int]:
        """Get the mapping between the on-disk order and the canonical order.

        It is the inverse of the axes_order.
        """
        sorted_order = np.argsort(self.axes_order).tolist()
        return sorted_order  # type: ignore

    @property
    def scale(self) -> list[float]:
        """Get the scale transformation of the dataset in the canonical order."""
        return [self._scale[i] for i in self._index_mapping.values()]

    @property
    def time_spacing(self) -> float:
        """Get the time spacing of the dataset (1.0 if there is no time axis)."""
        t = self.index_mapping.get("t")
        if t is None:
            return 1.0

        scale_t = self.scale[t]
        return scale_t

    @property
    def on_disk_scale(self) -> list[float]:
        """Get the scale transformation of the dataset in the on-disk order."""
        return self._scale

    @property
    def translation(self) -> list[float] | None:
        """Get the translation transformation of the dataset in the canonical order."""
        if self._translation is None:
            return None
        return [self._translation[i] for i in self._index_mapping.values()]

    @property
    def axes_names(self) -> list[str]:
        """Get the axes names in the canonical order."""
        return [ax.name for ax in self.axes]

    @property
    def space_axes_names(self) -> list[str]:
        """Get the spatial axes names in the canonical order."""
        return [ax.name for ax in self.axes if ax.type == AxisType.space]

    @property
    def space_axes_unit(self) -> SpaceUnits:
        """Get the unit of the spatial axes.

        Raises:
            ValueError: If the spatial axes have inconsistent, missing, or
                invalid units.
        """
        types = [ax.unit for ax in self.axes if ax.type == AxisType.space]
        if len(set(types)) > 1:
            raise ValueError("Inconsistent spatial axes units.")
        return_type = types[0]
        if return_type is None:
            raise ValueError("Spatial axes must have a unit.")
        if return_type not in SpaceUnits.allowed_names():
            raise ValueError(f"Invalid space unit {return_type}.")
        return SpaceUnits(return_type)

    @property
    def pixel_size(self) -> PixelSize:
        """Get the pixel size of the dataset (z defaults to 1.0 if absent)."""
        pixel_sizes = {}

        for ax, scale in zip(self.axes, self.scale, strict=True):
            if ax.type == AxisType.space:
                pixel_sizes[ax.name] = scale

        return PixelSize(
            x=pixel_sizes["x"],
            y=pixel_sizes["y"],
            z=pixel_sizes.get("z", 1.0),
            unit=self.space_axes_unit,
        )

    @property
    def time_axis_unit(self) -> TimeUnits | None:
        """Get the unit of the time axis (None if there is no time axis).

        Raises:
            ValueError: If more than one time axis is present.
        """
        types = [ax.unit for ax in self.axes if ax.type == AxisType.time]
        if len(types) == 0:
            return None
        elif len(types) == 1:
            return TimeUnits(types[0])
        else:
            raise ValueError("Multiple time axes found. Only one time axis is allowed.")

    def remove_axis(self, axis_name: str) -> "Dataset":
        """Return a new Dataset with the given axis removed.

        Args:
            axis_name(str): The name of the axis to remove.

        Raises:
            ValueError: If the axis is missing or is one of the mandatory
                x/y axes.
        """
        if axis_name not in self.axes_names:
            raise ValueError(f"Axis {axis_name} not found in the dataset.")

        if axis_name in ["x", "y"]:
            raise ValueError("Cannot remove mandatory axes x and y.")

        # index_mapping holds the on-disk index, which is what we pop from
        # the on-disk lists below.
        axes_idx = self.index_mapping[axis_name]

        new_on_disk_axes = self._on_disk_axes.copy()
        new_on_disk_axes.pop(axes_idx)

        new_scale = self._scale.copy()
        new_scale.pop(axes_idx)

        if self._translation is not None:
            new_translation = self._translation.copy()
            new_translation.pop(axes_idx)
        else:
            new_translation = None

        return Dataset(
            path=self.path,
            on_disk_axes=new_on_disk_axes,
            on_disk_scale=new_scale,
            on_disk_translation=new_translation,
            canonical_order=self._canonical_order,
        )
888
-
889
-
890
################################################################################################
#
# BaseMeta, ImageMeta and LabelMeta are the core models that represent the OME-NGFF
# multiscale spec in memory. They are the only interfaces to interact with
# the metadata on-disk and the metadata in memory.
#
################################################################################################
897
- class BaseMeta:
898
- """Base class for ImageMeta and LabelMeta."""
899
-
900
- def __init__(self, version: str, name: str | None, datasets: list[Dataset]) -> None:
901
- """Initialize the ImageMeta object."""
902
- self._version = NgffVersion(version)
903
- self._name = name
904
-
905
- if len(datasets) == 0:
906
- raise ValueError("At least one dataset must be provided.")
907
-
908
- self._datasets = datasets
909
-
910
- @property
911
- def version(self) -> NgffVersion:
912
- """Version of the OME-NFF metadata used to build the object."""
913
- return self._version
914
-
915
- @property
916
- def name(self) -> str | None:
917
- """Name of the image."""
918
- return self._name
919
-
920
- @property
921
- def datasets(self) -> list[Dataset]:
922
- """List of datasets in the multiscale."""
923
- return self._datasets
924
-
925
- @property
926
- def num_levels(self) -> int:
927
- """Number of levels in the multiscale."""
928
- return len(self.datasets)
929
-
930
- @property
931
- def levels_paths(self) -> list[str]:
932
- """List of paths of the datasets."""
933
- return [dataset.path for dataset in self.datasets]
934
-
935
- @property
936
- def index_mapping(self) -> dict[str, int]:
937
- """Get the mapping between the canonical order and the actual order."""
938
- return self.datasets[0].index_mapping
939
-
940
- @property
941
- def axes(self) -> list[Axis]:
942
- """List of axes in the canonical order."""
943
- return self.datasets[0].axes
944
-
945
- @property
946
- def axes_names(self) -> list[str]:
947
- """List of axes names in the canonical order."""
948
- return self.datasets[0].axes_names
949
-
950
- @property
951
- def space_axes_names(self) -> list[str]:
952
- """List of spatial axes names in the canonical order."""
953
- return self.datasets[0].space_axes_names
954
-
955
- @property
956
- def space_axes_unit(self) -> SpaceUnits:
957
- """Get the unit of the spatial axes."""
958
- return self.datasets[0].space_axes_unit
959
-
960
- @property
961
- def time_axis_unit(self) -> TimeUnits | None:
962
- """Get the unit of the time axis."""
963
- return self.datasets[0].time_axis_unit
964
-
965
- def _get_dataset_by_path(self, path: str) -> Dataset:
966
- """Get a dataset by its path."""
967
- for dataset in self.datasets:
968
- if dataset.path == path:
969
- return dataset
970
- raise ValueError(f"Dataset with path {path} not found.")
971
-
972
- def _get_dataset_by_index(self, idx: int) -> Dataset:
973
- """Get a dataset by its index."""
974
- if idx < 0 or idx >= len(self.datasets):
975
- raise ValueError(f"Index {idx} out of range.")
976
- return self.datasets[idx]
977
-
978
- def _get_dataset_by_pixel_size(
979
- self, pixel_size: PixelSize, strict: bool = False, tol: float = 1e-6
980
- ) -> Dataset:
981
- """Get a dataset with the closest pixel size.
982
-
983
- Args:
984
- pixel_size(PixelSize): The pixel size to search for.
985
- strict(bool): If True, the pixel size must smaller than tol.
986
- tol(float): Any pixel size with a distance less than tol will be considered.
987
- """
988
- min_dist = np.inf
989
-
990
- for dataset in self.datasets:
991
- dist = dataset.pixel_size.distance(pixel_size)
992
- if dist < min_dist:
993
- min_dist = dist
994
- closest_dataset = dataset
995
-
996
- if strict and min_dist > tol:
997
- raise ValueError("No dataset with a pixel size close enough.")
998
-
999
- return closest_dataset
1000
-
1001
- def get_dataset(
1002
- self,
1003
- *,
1004
- path: str | None = None,
1005
- idx: int | None = None,
1006
- pixel_size: PixelSize | None = None,
1007
- highest_resolution: bool = False,
1008
- strict: bool = False,
1009
- ) -> Dataset:
1010
- """Get a dataset by its path, index or pixel size.
1011
-
1012
- Args:
1013
- path(str): The path of the dataset.
1014
- idx(int): The index of the dataset.
1015
- pixel_size(PixelSize): The pixel size to search for.
1016
- highest_resolution(bool): If True, the dataset with the highest resolution
1017
- strict(bool): If True, the pixel size must be exactly the same.
1018
- If pixel_size is None, strict is ignored.
1019
- """
1020
- # Only one of the arguments must be provided
1021
- if (
1022
- sum(
1023
- [
1024
- path is not None,
1025
- idx is not None,
1026
- pixel_size is not None,
1027
- highest_resolution,
1028
- ]
1029
- )
1030
- != 1
1031
- ):
1032
- raise ValueError("get_dataset must receive only one argument.")
1033
-
1034
- if path is not None:
1035
- return self._get_dataset_by_path(path)
1036
- elif idx is not None:
1037
- return self._get_dataset_by_index(idx)
1038
- elif pixel_size is not None:
1039
- return self._get_dataset_by_pixel_size(pixel_size, strict=strict)
1040
- elif highest_resolution:
1041
- return self.get_highest_resolution_dataset()
1042
- else:
1043
- raise ValueError("get_dataset has no valid arguments.")
1044
-
1045
- def get_highest_resolution_dataset(self) -> Dataset:
1046
- """Get the dataset with the highest resolution."""
1047
- return self._get_dataset_by_pixel_size(
1048
- pixel_size=PixelSize(x=0.0, y=0.0, z=0.0, unit=SpaceUnits.um), strict=False
1049
- )
1050
-
1051
- def scale(self, path: str | None = None, idx: int | None = None) -> list[float]:
1052
- """Get the scale transformation of a dataset.
1053
-
1054
- Args:
1055
- path(str): The path of the dataset.
1056
- idx(int): The index of the dataset.
1057
- """
1058
- return self.get_dataset(path=path, idx=idx).scale
1059
-
1060
- def _scaling_factors(self) -> list[float]:
1061
- scaling_factors = []
1062
- for d1, d2 in zip(self.datasets[1:], self.datasets[:-1], strict=True):
1063
- scaling_factors.append(
1064
- [d1 / d2 for d1, d2 in zip(d1.scale, d2.scale, strict=True)]
1065
- )
1066
-
1067
- for sf in scaling_factors:
1068
- assert (
1069
- sf == scaling_factors[0]
1070
- ), "Inconsistent scaling factors not well supported."
1071
- return scaling_factors[0]
1072
-
1073
- @property
1074
- def xy_scaling_factor(self) -> float:
1075
- """Get the xy scaling factor of the dataset."""
1076
- scaling_factors = self._scaling_factors()
1077
- x, y = self.index_mapping.get("x"), self.index_mapping.get("y")
1078
- if x is None or y is None:
1079
- raise ValueError("Mandatory axes x and y not found.")
1080
-
1081
- x_scaling_f = scaling_factors[x]
1082
- y_scaling_f = scaling_factors[y]
1083
-
1084
- if not np.allclose(x_scaling_f, y_scaling_f):
1085
- raise ValueError("Inconsistent xy scaling factor.")
1086
- return x_scaling_f
1087
-
1088
- @property
1089
- def z_scaling_factor(self) -> float:
1090
- """Get the z scaling factor of the dataset."""
1091
- scaling_factors = self._scaling_factors()
1092
- z = self.index_mapping.get("z")
1093
- if z is None:
1094
- return 1.0
1095
-
1096
- z_scaling_f = scaling_factors[z]
1097
- return z_scaling_f
1098
-
1099
- def translation(
1100
- self, path: str | None = None, idx: int | None = None
1101
- ) -> list[float] | None:
1102
- """Get the translation transformation of a dataset.
1103
-
1104
- Args:
1105
- path(str): The path of the dataset.
1106
- idx(int): The index of the dataset.
1107
- """
1108
- return self.get_dataset(path=path, idx=idx).translation
1109
-
1110
- def pixel_size(self, path: str | None = None, idx: int | None = None) -> PixelSize:
1111
- """Get the pixel size of a dataset.
1112
-
1113
- Args:
1114
- path(str): The path of the dataset.
1115
- idx(int): The index of the dataset.
1116
- """
1117
- return self.get_dataset(path=path, idx=idx).pixel_size
1118
-
1119
- def remove_axis(self, axis_name: str) -> Self:
1120
- """Remove an axis from the metadata.
1121
-
1122
- Args:
1123
- axis_name(str): The name of the axis to remove.
1124
- """
1125
- new_datasets = [dataset.remove_axis(axis_name) for dataset in self.datasets]
1126
- return self.__class__(
1127
- version=self.version, name=self.name, datasets=new_datasets
1128
- )
1129
-
1130
-
1131
class LabelMeta(BaseMeta):
    """Label metadata model.

    Same as BaseMeta, but rejects multiscales that declare a channel axis.
    """

    def __init__(self, version: str, name: str | None, datasets: list[Dataset]) -> None:
        """Initialize the LabelMeta object.

        Args:
            version(str): The OME-NGFF version string.
            name(str | None): Optional name of the label.
            datasets(list[Dataset]): The multiscale levels (at least one).

        Raises:
            ValueError: If any axis of the multiscale is a channel axis.
        """
        super().__init__(version, name, datasets)

        # Make sure that there are no channel axes.
        # Bug fix: the error message previously referred to ImageMeta.
        for ax in self.datasets[0].axes:
            if ax.type == AxisType.channel:
                raise ValueError("Channel axes are not allowed in LabelMeta.")
1142
-
1143
-
1144
class ImageMeta(BaseMeta):
    """Image metadata model.

    Extends BaseMeta with optional OMERO channel metadata.
    """

    def __init__(
        self,
        version: str,
        name: str | None,
        datasets: list[Dataset],
        omero: Omero | None = None,
    ) -> None:
        """Initialize the ImageMeta object.

        Args:
            version(str): The OME-NGFF version string.
            name(str | None): Optional name of the image.
            datasets(list[Dataset]): The multiscale levels (at least one).
            omero(Omero | None): Optional OMERO channel metadata.
        """
        super().__init__(version=version, name=name, datasets=datasets)
        self._omero = omero

    @property
    def omero(self) -> Omero | None:
        """Get the OMERO metadata (None when not set)."""
        return self._omero

    def set_omero(self, omero: Omero) -> None:
        """Set omero metadata."""
        self._omero = omero

    def lazy_init_omero(
        self,
        labels: list[str] | int,
        wavelength_ids: list[str] | None = None,
        colors: list[str] | None = None,
        active: list[bool] | None = None,
        start: list[int | float] | None = None,
        end: list[int | float] | None = None,
        data_type: Any = np.uint16,
    ) -> None:
        """Set the OMERO metadata for the image.

        Builds an Omero object with sensible defaults and attaches it.

        Args:
            labels (list[str] | int): The labels of the channels (or the
                number of channels).
            wavelength_ids (list[str], optional): The wavelengths of the channels.
            colors (list[str], optional): The colors of the channels.
            active (list[bool], optional): Whether each channel is active.
            start (list[int | float], optional): The start value of each channel.
            end (list[int | float], optional): The end value of each channel.
            data_type (Any): The data type of the channel.
        """
        omero = Omero.lazy_init(
            labels=labels,
            wavelength_id=wavelength_ids,
            colors=colors,
            active=active,
            start=start,
            end=end,
            data_type=data_type,
        )
        self.set_omero(omero=omero)

    @property
    def channels(self) -> list[Channel]:
        """Get the channels in the image (empty when no OMERO metadata)."""
        if self._omero is None:
            return []
        return self._omero.channels

    @property
    def channel_labels(self) -> list[str]:
        """Get the labels of the channels in the image."""
        return [channel.label for channel in self.channels]

    @property
    def channel_wavelength_ids(self) -> list[str | None]:
        """Get the wavelength IDs of the channels in the image."""
        return [channel.wavelength_id for channel in self.channels]

    def _get_channel_idx_by_label(self, label: str) -> int | None:
        """Get the index of a channel by its label.

        Returns None when no OMERO metadata is present.

        Raises:
            ValueError: If no channel has the given label.
        """
        if self._omero is None:
            return None

        if label not in self.channel_labels:
            raise ValueError(f"Channel with label {label} not found.")

        return self.channel_labels.index(label)

    def _get_channel_idx_by_wavelength_id(self, wavelength_id: str) -> int | None:
        """Get the index of a channel by its wavelength ID.

        Returns None when no OMERO metadata is present.

        Raises:
            ValueError: If no channel has the given wavelength ID.
        """
        if self._omero is None:
            return None

        if wavelength_id not in self.channel_wavelength_ids:
            raise ValueError(f"Channel with wavelength ID {wavelength_id} not found.")

        return self.channel_wavelength_ids.index(wavelength_id)

    def get_channel_idx(
        self, label: str | None = None, wavelength_id: str | None = None
    ) -> int | None:
        """Get the index of a channel by its label or wavelength ID.

        Exactly one of label / wavelength_id must be provided.
        """
        # Only one of the arguments must be provided
        if sum([label is not None, wavelength_id is not None]) != 1:
            raise ValueError("get_channel_idx must receive only one argument.")

        if label is not None:
            return self._get_channel_idx_by_label(label)
        return self._get_channel_idx_by_wavelength_id(wavelength_id)

    def to_label(self, name: str | None = None) -> LabelMeta:
        """Convert the ImageMeta to a LabelMeta.

        Args:
            name(str | None): Name for the resulting label; defaults to the
                image name.
        """
        image_meta = self.remove_axis("c")
        # Bug fix: the computed ``name`` was previously discarded and
        # ``self.name`` was always passed through.
        name = self.name if name is None else name
        return LabelMeta(
            version=self.version, name=name, datasets=image_meta.datasets
        )
1265
-
1266
-
1267
# Convenience alias for APIs that accept either an image or a label
# multiscale metadata object.
ImageLabelMeta = ImageMeta | LabelMeta