ngio 0.3.4__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ngio/__init__.py +7 -2
- ngio/common/__init__.py +5 -52
- ngio/common/_dimensions.py +270 -55
- ngio/common/_masking_roi.py +38 -10
- ngio/common/_pyramid.py +51 -30
- ngio/common/_roi.py +269 -82
- ngio/common/_synt_images_utils.py +101 -0
- ngio/common/_zoom.py +49 -19
- ngio/experimental/__init__.py +5 -0
- ngio/experimental/iterators/__init__.py +15 -0
- ngio/experimental/iterators/_abstract_iterator.py +390 -0
- ngio/experimental/iterators/_feature.py +189 -0
- ngio/experimental/iterators/_image_processing.py +130 -0
- ngio/experimental/iterators/_mappers.py +48 -0
- ngio/experimental/iterators/_rois_utils.py +127 -0
- ngio/experimental/iterators/_segmentation.py +235 -0
- ngio/hcs/_plate.py +41 -36
- ngio/images/__init__.py +22 -1
- ngio/images/_abstract_image.py +403 -176
- ngio/images/_create.py +31 -15
- ngio/images/_create_synt_container.py +138 -0
- ngio/images/_image.py +452 -63
- ngio/images/_label.py +56 -30
- ngio/images/_masked_image.py +387 -129
- ngio/images/_ome_zarr_container.py +237 -67
- ngio/{common → images}/_table_ops.py +41 -41
- ngio/io_pipes/__init__.py +75 -0
- ngio/io_pipes/_io_pipes.py +361 -0
- ngio/io_pipes/_io_pipes_masked.py +488 -0
- ngio/io_pipes/_io_pipes_roi.py +152 -0
- ngio/io_pipes/_io_pipes_types.py +56 -0
- ngio/io_pipes/_match_shape.py +376 -0
- ngio/io_pipes/_ops_axes.py +344 -0
- ngio/io_pipes/_ops_slices.py +446 -0
- ngio/io_pipes/_ops_slices_utils.py +196 -0
- ngio/io_pipes/_ops_transforms.py +104 -0
- ngio/io_pipes/_zoom_transform.py +175 -0
- ngio/ome_zarr_meta/__init__.py +4 -2
- ngio/ome_zarr_meta/ngio_specs/__init__.py +4 -10
- ngio/ome_zarr_meta/ngio_specs/_axes.py +186 -175
- ngio/ome_zarr_meta/ngio_specs/_channels.py +55 -18
- ngio/ome_zarr_meta/ngio_specs/_dataset.py +48 -122
- ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py +6 -15
- ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +38 -87
- ngio/ome_zarr_meta/ngio_specs/_pixel_size.py +17 -1
- ngio/ome_zarr_meta/v04/_v04_spec_utils.py +34 -31
- ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/mask.png +0 -0
- ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/nuclei.png +0 -0
- ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/raw.jpg +0 -0
- ngio/resources/__init__.py +55 -0
- ngio/resources/resource_model.py +36 -0
- ngio/tables/backends/_abstract_backend.py +5 -6
- ngio/tables/backends/_anndata.py +1 -2
- ngio/tables/backends/_anndata_utils.py +3 -3
- ngio/tables/backends/_non_zarr_backends.py +1 -1
- ngio/tables/backends/_table_backends.py +0 -1
- ngio/tables/backends/_utils.py +3 -3
- ngio/tables/v1/_roi_table.py +165 -70
- ngio/transforms/__init__.py +5 -0
- ngio/transforms/_zoom.py +19 -0
- ngio/utils/__init__.py +2 -3
- ngio/utils/_datasets.py +5 -0
- ngio/utils/_logger.py +19 -0
- ngio/utils/_zarr_utils.py +6 -6
- {ngio-0.3.4.dist-info → ngio-0.4.0.dist-info}/METADATA +24 -22
- ngio-0.4.0.dist-info/RECORD +85 -0
- ngio/common/_array_pipe.py +0 -288
- ngio/common/_axes_transforms.py +0 -64
- ngio/common/_common_types.py +0 -5
- ngio/common/_slicer.py +0 -96
- ngio-0.3.4.dist-info/RECORD +0 -61
- {ngio-0.3.4.dist-info → ngio-0.4.0.dist-info}/WHEEL +0 -0
- {ngio-0.3.4.dist-info → ngio-0.4.0.dist-info}/licenses/LICENSE +0 -0
ngio/ome_zarr_meta/ngio_specs/_dataset.py

@@ -1,87 +1,51 @@
 """Fractal internal module for dataset metadata handling."""
 
-from collections.abc import
+from collections.abc import Sequence
 
 from ngio.ome_zarr_meta.ngio_specs._axes import (
-
-    AxesSetup,
-    Axis,
-    AxisType,
-    DefaultSpaceUnit,
-    DefaultTimeUnit,
-    SpaceUnits,
-    TimeUnits,
+    AxesHandler,
 )
 from ngio.ome_zarr_meta.ngio_specs._pixel_size import PixelSize
 from ngio.utils import NgioValidationError
 
 
 class Dataset:
-    """Model for a dataset in the multiscale.
-
-    To initialize the Dataset object, the path, the axes, scale, and translation list
-    can be provided with on_disk order.
-    """
+    """Model for a dataset in the multiscale."""
 
     def __init__(
         self,
         *,
         # args coming from ngff specs
         path: str,
-
-
-
-        # user defined args
-        axes_setup: AxesSetup | None = None,
-        allow_non_canonical_axes: bool = False,
-        strict_canonical_order: bool = False,
+        axes_handler: AxesHandler,
+        scale: Sequence[float],
+        translation: Sequence[float] | None = None,
    ):
         """Initialize the Dataset object.
 
        Args:
            path (str): The path of the dataset.
-
-
+            axes_handler (AxesHandler): The axes handler object.
+            scale (list[float]): The list of scale transformation.
                The scale transformation must have the same length as the axes.
-
-
-            allow_non_canonical_axes (bool): Allow non-canonical axes.
-            strict_canonical_order (bool): Strict canonical order.
+            translation (list[float] | None): The list of translation.
+                The translation must have the same length as the axes.
        """
        self._path = path
-        self.
-            on_disk_axes=on_disk_axes,
-            axes_setup=axes_setup,
-            allow_non_canonical_axes=allow_non_canonical_axes,
-            strict_canonical_order=strict_canonical_order,
-        )
+        self._axes_handler = axes_handler
 
-        if len(
+        if len(scale) != len(axes_handler.axes):
            raise NgioValidationError(
                "The length of the scale transformation must be the same as the axes."
            )
-        self.
+        self._scale = list(scale)
 
-
-        if len(
+        translation = translation or [0.0] * len(axes_handler.axes)
+        if len(translation) != len(axes_handler.axes):
            raise NgioValidationError(
                "The length of the translation must be the same as the axes."
            )
-        self.
-
-    def get_scale(self, axis_name: str) -> float:
-        """Return the scale for a given axis."""
-        idx = self._axes_mapper.get_index(axis_name)
-        if idx is None:
-            return 1.0
-        return self._on_disk_scale[idx]
-
-    def get_translation(self, axis_name: str) -> float:
-        """Return the translation for a given axis."""
-        idx = self._axes_mapper.get_index(axis_name)
-        if idx is None:
-            return 0.0
-        return self._on_disk_translation[idx]
+        self._translation = list(translation)
 
     @property
     def path(self) -> str:
@@ -89,84 +53,46 @@ class Dataset:
        return self._path
 
    @property
-    def
-        """Return the
-
-        y_axis = self._axes_mapper.get_axis("y")
-
-        if x_axis is None or y_axis is None:
-            raise NgioValidationError(
-                "The dataset must have x and y axes to determine the space unit."
-            )
-
-        if x_axis.unit == y_axis.unit:
-            return x_axis.unit
-        else:
-            raise NgioValidationError(
-                "Inconsistent space units. "
-                f"x={x_axis.unit} and y={y_axis.unit} should have the same unit."
-            )
-
-    @property
-    def time_unit(self) -> str | None:
-        """Return the time unit for a given axis."""
-        t_axis = self._axes_mapper.get_axis("t")
-        if t_axis is None:
-            return None
-        return t_axis.unit
+    def axes_handler(self) -> AxesHandler:
+        """Return the axes handler object."""
+        return self._axes_handler
 
    @property
    def pixel_size(self) -> PixelSize:
        """Return the pixel size for the dataset."""
        return PixelSize(
-            x=self.get_scale("x"),
-            y=self.get_scale("y"),
-            z=self.get_scale("z"),
-            t=self.get_scale("t"),
-            space_unit=self.space_unit,
-            time_unit=self.time_unit,
+            x=self.get_scale("x", default=1.0),
+            y=self.get_scale("y", default=1.0),
+            z=self.get_scale("z", default=1.0),
+            t=self.get_scale("t", default=1.0),
+            space_unit=self.axes_handler.space_unit,
+            time_unit=self.axes_handler.time_unit,
        )
 
    @property
-    def
-        """Return the
-        return self.
+    def scale(self) -> tuple[float, ...]:
+        """Return the scale transformation as a tuple."""
+        return tuple(self._scale)
 
-
-
-
-
-        time_unit: TimeUnits = DefaultTimeUnit,
-    ) -> "Dataset":
-        """Convert the pixel size to the given units.
+    @property
+    def translation(self) -> tuple[float, ...]:
+        """Return the translation as a tuple."""
+        return tuple(self._translation)
 
-
-
-
-
-
-
-
-
-                on_disk_name=ax.on_disk_name,
-                axis_type=ax.axis_type,
-                unit=space_unit,
-            )
-            new_axes.append(new_ax)
-        elif ax.axis_type == AxisType.time:
-            new_ax = Axis(
-                on_disk_name=ax.on_disk_name, axis_type=ax.axis_type, unit=time_unit
-            )
-            new_axes.append(new_ax)
-        else:
-            new_axes.append(ax)
+    def get_scale(self, axis_name: str, default: float | None = None) -> float:
+        """Return the scale for a given axis."""
+        idx = self.axes_handler.get_index(axis_name)
+        if idx is None:
+            if default is not None:
+                return default
+            raise ValueError(f"Axis {axis_name} not found in axes {self.axes_handler}.")
+        return self._scale[idx]
 
-
-
-
-
-
-
-
-
-        )
+    def get_translation(self, axis_name: str, default: float | None = None) -> float:
+        """Return the translation for a given axis."""
+        idx = self.axes_handler.get_index(axis_name)
+        if idx is None:
+            if default is not None:
+                return default
+            raise ValueError(f"Axis {axis_name} not found in axes {self.axes_handler}.")
+        return self._translation[idx]
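
The reworked `Dataset` keeps a plain scale/translation list next to an `AxesHandler`, and `get_scale`/`get_translation` now return the value at the axis index, fall back to an explicit `default`, or raise. A minimal standalone sketch of that lookup logic (the flat axes list and helper below are illustrative stand-ins, not ngio's real `AxesHandler` API):

```python
# Illustrative stand-ins: a flat axes-name list instead of ngio's AxesHandler.
axes_names = ["t", "c", "z", "y", "x"]
scale = [1.0, 1.0, 1.0, 0.325, 0.325]

def get_scale(axis_name: str, default: float | None = None) -> float:
    """Mirror Dataset.get_scale: value at the axis index, or the given default."""
    if axis_name not in axes_names:
        if default is not None:
            return default
        raise ValueError(f"Axis {axis_name} not found in axes {axes_names}.")
    return scale[axes_names.index(axis_name)]

print(get_scale("x"))               # 0.325
print(get_scale("q", default=1.0))  # 1.0: a missing axis falls back to the default
```
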
ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py

@@ -21,21 +21,12 @@ from ngio.utils import NgioValueError, ngio_logger
 
 def path_in_well_validation(path: str) -> str:
    """Validate the path in the well."""
-    if path.find("_") != -1:
-        # Remove underscores from the path
-        # This is a custom serialization step
-        old_value = path
-        path = path.replace("_", "")
-        ngio_logger.warning(
-            f"Underscores in well-paths are not allowed. "
-            f"Path '{old_value}' was changed to '{path}'"
-            f" to comply with the specification."
-        )
    # Check if the value contains only alphanumeric characters
    if not path.isalnum():
-
+        ngio_logger.warning(
            f"Path '{path}' contains non-alphanumeric characters. "
-
+            "This may cause issues with some tools. "
+            "Consider using only alphanumeric characters in the path."
        )
    return path
 
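
The well-path validation no longer rewrites paths: underscores are kept, and a non-alphanumeric path only triggers a warning. A standalone sketch of the new behaviour, using the stdlib `logging` module in place of `ngio_logger`:

```python
import logging

logger = logging.getLogger("ngio.example")

def path_in_well_validation(path: str) -> str:
    """Warn on non-alphanumeric well paths, but return them unchanged."""
    if not path.isalnum():
        logger.warning(
            f"Path '{path}' contains non-alphanumeric characters. "
            "This may cause issues with some tools. "
            "Consider using only alphanumeric characters in the path."
        )
    return path

assert path_in_well_validation("0") == "0"
assert path_in_well_validation("0_registered") == "0_registered"  # kept as-is, with a warning
```
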
@@ -60,11 +51,11 @@ class CustomWellImage(WellImage04):
 
 
 class CustomWellMeta(WellMeta04):
-    images: list[CustomWellImage]  # type: ignore
+    images: list[CustomWellImage]  # type: ignore (override of WellMeta04.images)
 
 
 class CustomWellAttrs(WellAttrs04):
-    well: CustomWellMeta  # type: ignore
+    well: CustomWellMeta  # type: ignore (override of WellAttrs04.well)
 
 
 class NgioWellMeta(CustomWellAttrs):
@@ -540,7 +531,7 @@ class NgioPlateMeta(HCSAttrs):
            acquisitions = None
 
        if version is None:
-            version = self.plate.version  # type: ignore
+            version = self.plate.version  # type: ignore (version is NgffVersions or None)
 
        return NgioPlateMeta(
            plate=Plate(
ngio/ome_zarr_meta/ngio_specs/_ngio_image.py

@@ -6,7 +6,7 @@ But they can be built from the OME standard metadata, and the
 can be converted to the OME standard.
 """
 
-from collections.abc import
+from collections.abc import Sequence
 from typing import Any, Literal, TypeVar
 
 import numpy as np
@@ -17,9 +17,9 @@ from ngio.ome_zarr_meta.ngio_specs._axes import (
    DefaultTimeUnit,
    SpaceUnits,
    TimeUnits,
-
+    build_canonical_axes_handler,
 )
-from ngio.ome_zarr_meta.ngio_specs._channels import
+from ngio.ome_zarr_meta.ngio_specs._channels import ChannelsMeta
 from ngio.ome_zarr_meta.ngio_specs._dataset import Dataset
 from ngio.ome_zarr_meta.ngio_specs._pixel_size import PixelSize
 from ngio.utils import NgioValidationError, NgioValueError
@@ -55,38 +55,39 @@ class AbstractNgioImageMeta:
            raise NgioValidationError("At least one dataset must be provided.")
 
        self._datasets = datasets
-        self.
+        self._axes_handler = datasets[0].axes_handler
 
    def __repr__(self):
        class_name = type(self).__name__
        paths = [dataset.path for dataset in self.datasets]
-
-        return f"{class_name}(name={self.name}, datasets={paths}, axes={
+        axes = self.axes_handler.axes_names
+        return f"{class_name}(name={self.name}, datasets={paths}, axes={axes})"
 
    @classmethod
    def default_init(
        cls,
-        levels: int |
-        axes_names:
+        levels: int | Sequence[str],
+        axes_names: Sequence[str],
        pixel_size: PixelSize,
-        scaling_factors:
+        scaling_factors: Sequence[float] | None = None,
        name: str | None = None,
        version: NgffVersions = DefaultNgffVersion,
    ):
        """Initialize the ImageMeta object."""
-
+        axes_handler = build_canonical_axes_handler(
            axes_names,
-            space_units=pixel_size.space_unit,
-            time_units=pixel_size.time_unit,
+            space_units=pixel_size.space_unit,
+            time_units=pixel_size.time_unit,
        )
 
        px_size_dict = pixel_size.as_dict()
-        scale = [px_size_dict.get(
-        translation = [0.0] * len(scale)
+        scale = [px_size_dict.get(name, 1.0) for name in axes_handler.axes_names]
 
        if scaling_factors is None:
            _default = {"x": 2.0, "y": 2.0}
-            scaling_factors = [
+            scaling_factors = [
+                _default.get(name, 1.0) for name in axes_handler.axes_names
+            ]
 
        if isinstance(levels, int):
            levels = [str(i) for i in range(levels)]
@@ -95,11 +96,9 @@ class AbstractNgioImageMeta:
        for level in levels:
            dataset = Dataset(
                path=level,
-
-
-
-                allow_non_canonical_axes=False,
-                strict_canonical_order=True,
+                axes_handler=axes_handler,
+                scale=scale,
+                translation=None,
            )
            datasets.append(dataset)
            scale = [s * f for s, f in zip(scale, scaling_factors, strict=True)]
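
`default_init` now derives everything per axis name: the base scale comes from the pixel size (missing axes default to 1.0), the default scaling factors are 2.0 for x/y and 1.0 elsewhere, and each pyramid level multiplies the previous scale by those factors. A standalone sketch of that arithmetic, with plain lists and dicts standing in for the `Dataset`/`AxesHandler` objects:

```python
# Plain-data stand-ins for the per-level scale computation in default_init.
axes_names = ["c", "z", "y", "x"]
pixel_size = {"z": 1.0, "y": 0.325, "x": 0.325}  # axes not listed default to 1.0

scale = [pixel_size.get(name, 1.0) for name in axes_names]
scaling_factors = [{"x": 2.0, "y": 2.0}.get(name, 1.0) for name in axes_names]

level_scales = {}
for level in ["0", "1", "2"]:
    level_scales[level] = scale
    scale = [s * f for s, f in zip(scale, scaling_factors, strict=True)]

print(level_scales)
# roughly: {'0': [1.0, 1.0, 0.325, 0.325], '1': [1.0, 1.0, 0.65, 0.65], '2': [1.0, 1.0, 1.3, 1.3]}
```
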
@@ -122,9 +121,18 @@ class AbstractNgioImageMeta:
            space_unit(str): The space unit to convert to.
            time_unit(str): The time unit to convert to.
        """
+        new_axes_handler = self.axes_handler.to_units(
+            space_unit=space_unit,
+            time_unit=time_unit,
+        )
        new_datasets = []
        for dataset in self.datasets:
-            new_dataset =
+            new_dataset = Dataset(
+                path=dataset.path,
+                axes_handler=new_axes_handler,
+                scale=dataset.scale,
+                translation=dataset.translation,
+            )
            new_datasets.append(new_dataset)
 
        return type(self)(
@@ -136,7 +144,7 @@ class AbstractNgioImageMeta:
    @property
    def version(self) -> NgffVersions:
        """Version of the OME-NFF metadata used to build the object."""
-        return self._version  # type: ignore
+        return self._version  # type: ignore (version is a Literal type)
 
    @property
    def name(self) -> str | None:
@@ -149,9 +157,9 @@ class AbstractNgioImageMeta:
        return self._datasets
 
    @property
-    def
+    def axes_handler(self):
        """Return the axes mapper."""
-        return self.
+        return self._axes_handler
 
    @property
    def levels(self) -> int:
@@ -166,12 +174,12 @@ class AbstractNgioImageMeta:
    @property
    def space_unit(self) -> str | None:
        """Get the space unit of the pixel size."""
-        return self.
+        return self.axes_handler.space_unit
 
    @property
    def time_unit(self) -> str | None:
        """Get the time unit of the pixel size."""
-        return self.
+        return self.axes_handler.time_unit
 
    def _get_dataset_by_path(self, path: str) -> Dataset:
        """Get a dataset by its path."""
@@ -321,11 +329,11 @@ class AbstractNgioImageMeta:
    def scaling_factor(self, path: str | None = None) -> list[float]:
        """Get the scaling factors from a dataset to its lower resolution."""
        if self.levels == 1:
-            return [1.0] * len(self.
+            return [1.0] * len(self.axes_handler.axes_names)
        dataset, lr_dataset = self._get_closest_datasets(path=path)
 
        scaling_factors = []
-        for ax_name in self.
+        for ax_name in self.axes_handler.axes_names:
            s_d = dataset.get_scale(ax_name)
            s_lr_d = lr_dataset.get_scale(ax_name)
            scaling_factors.append(s_lr_d / s_d)
@@ -359,8 +367,8 @@ class AbstractNgioImageMeta:
            return 1.0
        dataset, lr_dataset = self._get_closest_datasets(path=path)
 
-        s_d = dataset.get_scale("z")
-        s_lr_d = lr_dataset.get_scale("z")
+        s_d = dataset.get_scale("z", default=1.0)
+        s_lr_d = lr_dataset.get_scale("z", default=1.0)
        return s_lr_d / s_d
 
 
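
`scaling_factor` (and the z-specific variant above) reduces to a per-axis ratio between one level's scale and the next lower-resolution level's scale, with missing axes treated as 1.0. A short worked example of that ratio:

```python
# Per-axis scaling factor between two consecutive pyramid levels.
scale_level0 = [1.0, 1.0, 0.325, 0.325]  # higher resolution ("c", "z", "y", "x")
scale_level1 = [1.0, 1.0, 0.65, 0.65]    # next, lower-resolution level

scaling_factors = [lr / hr for hr, lr in zip(scale_level0, scale_level1, strict=True)]
print(scaling_factors)  # [1.0, 1.0, 2.0, 2.0]
```
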
@@ -462,62 +470,5 @@ class NgioImageMeta(AbstractNgioImageMeta):
        )
        self.set_channels_meta(channels_meta=channels_meta)
 
-    @property
-    def channels(self) -> list[Channel]:
-        """Get the channels in the image."""
-        if self._channels_meta is None:
-            return []
-        assert self.channels_meta is not None
-        return self.channels_meta.channels
-
-    @property
-    def channel_labels(self) -> list[str]:
-        """Get the labels of the channels in the image."""
-        return [channel.label for channel in self.channels]
-
-    @property
-    def channel_wavelength_ids(self) -> list[str | None]:
-        """Get the wavelength IDs of the channels in the image."""
-        return [channel.wavelength_id for channel in self.channels]
-
-    def _get_channel_idx_by_label(self, label: str) -> int | None:
-        """Get the index of a channel by its label."""
-        if self._channels_meta is None:
-            return None
-
-        if label not in self.channel_labels:
-            raise NgioValueError(f"Channel with label {label} not found.")
-
-        return self.channel_labels.index(label)
-
-    def _get_channel_idx_by_wavelength_id(self, wavelength_id: str) -> int | None:
-        """Get the index of a channel by its wavelength ID."""
-        if self._channels_meta is None:
-            return None
-
-        if wavelength_id not in self.channel_wavelength_ids:
-            raise NgioValueError(
-                f"Channel with wavelength ID {wavelength_id} not found."
-            )
-
-        return self.channel_wavelength_ids.index(wavelength_id)
-
-    def get_channel_idx(
-        self, label: str | None = None, wavelength_id: str | None = None
-    ) -> int | None:
-        """Get the index of a channel by its label or wavelength ID."""
-        # Only one of the arguments must be provided
-        if sum([label is not None, wavelength_id is not None]) != 1:
-            raise NgioValueError("get_channel_idx must receive only one argument.")
-
-        if label is not None:
-            return self._get_channel_idx_by_label(label)
-        elif wavelength_id is not None:
-            return self._get_channel_idx_by_wavelength_id(wavelength_id)
-        else:
-            raise NgioValueError(
-                "get_channel_idx must receive either label or wavelength_id."
-            )
-
 
 NgioImageLabelMeta = NgioImageMeta | NgioLabelMeta
ngio/ome_zarr_meta/ngio_specs/_pixel_size.py

@@ -2,6 +2,7 @@
 
 import math
 from functools import total_ordering
+from typing import overload
 
 import numpy as np
 
@@ -84,10 +85,25 @@ class PixelSize:
        )
        return self.distance(ref) < other.distance(ref)
 
-    def as_dict(self) -> dict:
+    def as_dict(self) -> dict[str, float]:
        """Return the pixel size as a dictionary."""
        return {"t": self.t, "z": self.z, "y": self.y, "x": self.x}
 
+    @overload
+    def get(self, axis: str, default: float) -> float: ...
+
+    @overload
+    def get(self, axis: str, default: None = None) -> float | None: ...
+
+    def get(self, axis: str, default: float | None = None) -> float | None:
+        """Get the pixel size for a given axis (in canonical name)."""
+        px_size = self.as_dict().get(axis, default)
+        if px_size is None:
+            raise ValueError(
+                f"Invalid axis name: {axis}, must be one of 'x', 'y', 'z', 't'."
+            )
+        return px_size
+
    @property
    def space_unit(self) -> SpaceUnits | str | None:
        """Return the space unit."""
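
A small usage sketch of the new `PixelSize.get` accessor. The import path is the one used elsewhere in this diff; the x/y/z/t keyword construction is taken from the `Dataset.pixel_size` property above, and the unit arguments are assumed to have defaults:

```python
# Usage sketch (assumes space_unit/time_unit default when not given).
from ngio.ome_zarr_meta.ngio_specs._pixel_size import PixelSize

px = PixelSize(x=0.325, y=0.325, z=1.0, t=1.0)
print(px.as_dict())              # {'t': 1.0, 'z': 1.0, 'y': 0.325, 'x': 0.325}
print(px.get("x"))               # 0.325
print(px.get("w", default=1.0))  # unknown axis with a default -> 1.0
# px.get("w") without a default raises ValueError.
```
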
ngio/ome_zarr_meta/v04/_v04_spec_utils.py

@@ -26,6 +26,7 @@ from ome_zarr_models.v04.omero import Window as WindowV04
 from pydantic import ValidationError
 
 from ngio.ome_zarr_meta.ngio_specs import (
+    AxesHandler,
    AxesSetup,
    Axis,
    AxisType,
@@ -160,34 +161,38 @@ def _v04_to_ngio_datasets(
        v04_multiscale.coordinateTransformations, global_scale, global_translation
    )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-    )
+    # Prepare axes handler
+    axes = []
+    for v04_axis in v04_multiscale.axes:
+        unit = v04_axis.unit
+        if unit is not None and not isinstance(unit, str):
+            unit = str(unit)
+        axes.append(
+            Axis(
+                name=v04_axis.name,
+                axis_type=AxisType(v04_axis.type),
+                # (for some reason the type is a generic JsonValue,
+                # but it should be a string or None)
+                unit=v04_axis.unit,  # type: ignore
            )
+        )
+    axes_handler = AxesHandler(
+        axes=axes,
+        axes_setup=axes_setup,
+        allow_non_canonical_axes=allow_non_canonical_axes,
+        strict_canonical_order=strict_canonical_order,
+    )
 
-
+    for v04_dataset in v04_multiscale.datasets:
+        _scale, _translation = _compute_scale_translation(
            v04_dataset.coordinateTransformations, global_scale, global_translation
        )
        datasets.append(
            Dataset(
                path=v04_dataset.path,
-
-
-
-                axes_setup=axes_setup,
-                allow_non_canonical_axes=allow_non_canonical_axes,
-                strict_canonical_order=strict_canonical_order,
+                axes_handler=axes_handler,
+                scale=_scale,
+                translation=_translation,
            )
        )
    return datasets
@@ -314,12 +319,12 @@ def _ngio_to_v04_multiscale(name: str | None, datasets: list[Dataset]) -> Multis
    Returns:
        MultiscaleV04: The v04 multiscale.
    """
-    ax_mapper = datasets[0].
+    ax_mapper = datasets[0].axes_handler
    v04_axes = []
-    for axis in ax_mapper.
+    for axis in ax_mapper.axes:
        v04_axes.append(
            AxisV04(
-                name=axis.
+                name=axis.name,
                type=axis.axis_type.value if axis.axis_type is not None else None,
                unit=axis.unit if axis.unit is not None else None,
            )
@@ -327,18 +332,16 @@ def _ngio_to_v04_multiscale(name: str | None, datasets: list[Dataset]) -> Multis
 
    v04_datasets = []
    for dataset in datasets:
-        transform = [VectorScaleV04(type="scale", scale=list(dataset.
-        if sum(dataset.
+        transform = [VectorScaleV04(type="scale", scale=list(dataset._scale))]
+        if sum(dataset._translation) > 0:
            transform = (
-                VectorScaleV04(type="scale", scale=list(dataset.
+                VectorScaleV04(type="scale", scale=list(dataset._scale)),
                VectorTranslationV04(
-                    type="translation", translation=list(dataset.
+                    type="translation", translation=list(dataset._translation)
                ),
            )
        else:
-            transform = (
-                VectorScaleV04(type="scale", scale=list(dataset._on_disk_scale)),
-            )
+            transform = (VectorScaleV04(type="scale", scale=list(dataset._scale)),)
 
        v04_datasets.append(
            DatasetV04(path=dataset.path, coordinateTransformations=transform)
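
The export rule above is simple: a dataset's scale always becomes a "scale" transform, and a "translation" transform is only appended when the translation components sum to a positive value. A standalone sketch of that rule, with plain dictionaries standing in for the `ome_zarr_models` transform classes:

```python
# Plain-dict stand-ins for VectorScaleV04 / VectorTranslationV04.
def build_transforms(scale: list[float], translation: list[float]) -> tuple[dict, ...]:
    """Scale transform always; translation transform only when sum(translation) > 0."""
    transforms: tuple[dict, ...] = ({"type": "scale", "scale": list(scale)},)
    if sum(translation) > 0:
        transforms += ({"type": "translation", "translation": list(translation)},)
    return transforms

print(build_transforms([1.0, 0.325, 0.325], [0.0, 0.0, 0.0]))
# ({'type': 'scale', 'scale': [1.0, 0.325, 0.325]},)
print(build_transforms([1.0, 0.325, 0.325], [0.0, 15.0, 30.0]))
# ({'type': 'scale', 'scale': [1.0, 0.325, 0.325]},
#  {'type': 'translation', 'translation': [0.0, 15.0, 30.0]})
```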