volsegtools 0.0.0 (tar.gz)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. volsegtools-0.0.0/PKG-INFO +22 -0
  2. volsegtools-0.0.0/README.md +3 -0
  3. volsegtools-0.0.0/pyproject.toml +48 -0
  4. volsegtools-0.0.0/setup.cfg +4 -0
  5. volsegtools-0.0.0/tests/test_lazy_data_reference.py +9 -0
  6. volsegtools-0.0.0/tests/test_typing.py +0 -0
  7. volsegtools-0.0.0/tests/test_vector.py +8 -0
  8. volsegtools-0.0.0/volsegtools/__init__.py +0 -0
  9. volsegtools-0.0.0/volsegtools/_cli/__init__.py +1 -0
  10. volsegtools-0.0.0/volsegtools/_cli/molstar_preprocessor.py +79 -0
  11. volsegtools-0.0.0/volsegtools/abc/__init__.py +6 -0
  12. volsegtools-0.0.0/volsegtools/abc/converter.py +69 -0
  13. volsegtools-0.0.0/volsegtools/abc/data_handle.py +24 -0
  14. volsegtools-0.0.0/volsegtools/abc/downsampler.py +26 -0
  15. volsegtools-0.0.0/volsegtools/abc/kernel.py +8 -0
  16. volsegtools-0.0.0/volsegtools/abc/preprocessor.py +38 -0
  17. volsegtools-0.0.0/volsegtools/abc/serializer.py +12 -0
  18. volsegtools-0.0.0/volsegtools/converter/__init__.py +1 -0
  19. volsegtools-0.0.0/volsegtools/converter/map_converter.py +148 -0
  20. volsegtools-0.0.0/volsegtools/core/__init__.py +5 -0
  21. volsegtools-0.0.0/volsegtools/core/bounds.py +12 -0
  22. volsegtools-0.0.0/volsegtools/core/downsampling_parameters.py +28 -0
  23. volsegtools-0.0.0/volsegtools/core/gaussian_kernel_3D.py +16 -0
  24. volsegtools-0.0.0/volsegtools/core/lattice_kind.py +8 -0
  25. volsegtools-0.0.0/volsegtools/core/vector.py +9 -0
  26. volsegtools-0.0.0/volsegtools/downsampler/__init__.py +2 -0
  27. volsegtools-0.0.0/volsegtools/downsampler/base_downsampler.py +20 -0
  28. volsegtools-0.0.0/volsegtools/downsampler/hierarchy_downsampler.py +253 -0
  29. volsegtools-0.0.0/volsegtools/model/__init__.py +13 -0
  30. volsegtools-0.0.0/volsegtools/model/chunking_mode.py +16 -0
  31. volsegtools-0.0.0/volsegtools/model/metadata.py +50 -0
  32. volsegtools-0.0.0/volsegtools/model/opaque_data_handle.py +112 -0
  33. volsegtools-0.0.0/volsegtools/model/storing_parameters.py +52 -0
  34. volsegtools-0.0.0/volsegtools/model/working_store.py +142 -0
  35. volsegtools-0.0.0/volsegtools/preprocessor/__init__.py +2 -0
  36. volsegtools-0.0.0/volsegtools/preprocessor/preprocessor.py +75 -0
  37. volsegtools-0.0.0/volsegtools/preprocessor/preprocessor_builder.py +110 -0
  38. volsegtools-0.0.0/volsegtools/serialization/__init__.py +1 -0
  39. volsegtools-0.0.0/volsegtools/serialization/bcif_serializer.py +318 -0
  40. volsegtools-0.0.0/volsegtools/typing.py +12 -0
  41. volsegtools-0.0.0/volsegtools.egg-info/PKG-INFO +22 -0
  42. volsegtools-0.0.0/volsegtools.egg-info/SOURCES.txt +44 -0
  43. volsegtools-0.0.0/volsegtools.egg-info/dependency_links.txt +1 -0
  44. volsegtools-0.0.0/volsegtools.egg-info/entry_points.txt +2 -0
  45. volsegtools-0.0.0/volsegtools.egg-info/requires.txt +12 -0
  46. volsegtools-0.0.0/volsegtools.egg-info/top_level.txt +1 -0
@@ -0,0 +1,22 @@
+ Metadata-Version: 2.4
+ Name: volsegtools
+ Version: 0.0.0
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.9
+ Description-Content-Type: text/markdown
+ Requires-Dist: dask
+ Requires-Dist: dask_image
+ Requires-Dist: zarr
+ Requires-Dist: numpy
+ Requires-Dist: mrcfile
+ Requires-Dist: nibabel
+ Requires-Dist: pydantic
+ Requires-Dist: typer
+ Requires-Dist: ciftools
+ Provides-Extra: dev
+ Requires-Dist: pytest; extra == "dev"
+
+ # Volseg Tools
+
+ A library for preprocessing volumes and segmentations for downstream use.
@@ -0,0 +1,3 @@
+ # Volseg Tools
+
+ A library for preprocessing volumes and segmentations for downstream use.
@@ -0,0 +1,48 @@
+ [build-system]
+ requires = ["setuptools>=61.0"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "volsegtools"
+ version = "0.0.0"
+ authors = [
+ ]
+ description = ""
+ readme = "README.md"
+ requires-python = ">=3.9"
+ classifiers = [
+     "Programming Language :: Python :: 3",
+     "Operating System :: OS Independent",
+ ]
+ dependencies = [
+     "dask",
+     "dask_image",
+     "zarr",
+     "numpy",
+     "mrcfile",
+     "nibabel",
+     "pydantic",
+     "typer",
+     "ciftools",
+ ]
+
+ [tool.setuptools]
+ packages = [
+     "volsegtools",
+     "volsegtools.core",
+     "volsegtools.model",
+     "volsegtools.downsampler",
+     "volsegtools.converter",
+     "volsegtools.abc",
+     "volsegtools.preprocessor",
+     "volsegtools.serialization",
+     "volsegtools._cli",  # FIX: this shouldn't be exported
+ ]
+
+ [project.optional-dependencies]
+ dev = [
+     "pytest"
+ ]
+
+ [project.scripts]
+ molstar-preprocessor = 'volsegtools._cli:app'
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
@@ -0,0 +1,9 @@
+ import numpy as np
+ import pytest
+
+ from volsegtools.model import LazyDataReference
+
+
+ def test_lazy_ref_from_np_arr():
+     arr = np.arange(10)
+     ref = LazyDataReference(arr)
File without changes
@@ -0,0 +1,8 @@
+ import volsegtools.core as vst
+
+
+ def test_vector():
+     vec = vst.Vector3()
+     assert hasattr(vec, 'x')
+     assert hasattr(vec, 'y')
+     assert hasattr(vec, 'z')
File without changes
@@ -0,0 +1 @@
+ from .molstar_preprocessor import app
@@ -0,0 +1,79 @@
+ import logging
+ import os
+ import shutil
+ import sys
+ from pathlib import Path
+ from typing import List
+
+ import typer
+ from typing_extensions import Annotated
+
+ from volsegtools.converter import MapConverter
+ from volsegtools.core import DownsamplingParameters, LatticeKind
+
+ # TODO: parameters should (and can) be moved to the downsampler package
+ from volsegtools.downsampler import HierarchyDownsampler
+ from volsegtools.model.working_store import WorkingStore
+ from volsegtools.preprocessor import Preprocessor, PreprocessorBuilder
+
+ app = typer.Typer()
+
+
+ @app.command()
+ def run(
+     volume_source: Annotated[
+         List[Path], typer.Option(help="Specifies a path to volumetric data.")
+     ] = [],
+     segmentation_source: Annotated[
+         List[Path], typer.Option(help="Specifies a path to segmentation data.")
+     ] = [],
+     workdir: Annotated[
+         Path,
+         typer.Option(help="Directory in which the temporary Zarr store is created."),
+     ] = Path.cwd(),
+     rm_tmp: Annotated[
+         bool,
+         typer.Option(help="Remove the temporary Zarr store created during downsampling."),
+     ] = False,
+     overwrite_tmp: Annotated[
+         bool, typer.Option(help="Overwrite the temporary Zarr store if present.")
+     ] = False,
+ ):
+     if len(sys.argv) < 2:
+         raise RuntimeError("Not enough arguments!")
+
+     logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO)
+
+     # TODO: this could be stored in the /tmp directory...
+     local_store_path = workdir / "volsegtools_workdir"
+     if overwrite_tmp and local_store_path.exists():
+         shutil.rmtree(local_store_path)
+
+     # Initialization of the singleton working store
+     # FIX: This shouldn't be necessary
+     working_store = WorkingStore(local_store_path)
+
+     builder = PreprocessorBuilder()
+     builder.set_converter(MapConverter())
+     builder.set_downsampler(HierarchyDownsampler())
+
+     for file in volume_source:
+         logging.debug(f"Adding file: '{file}' as a volume source.")
+         builder.add_volume_src_file(file)
+
+     for file in segmentation_source:
+         logging.debug(f"Adding file: '{file}' as a segmentation source.")
+         builder.add_segmentation_src_file(file)
+
+     builder.set_output_dir(local_store_path)
+
+     try:
+         preprocessor: Preprocessor = builder.build()
+         preprocessor.sync_preprocess()
+     finally:
+         if rm_tmp and local_store_path.exists():
+             shutil.rmtree(local_store_path)
+
+
+ if __name__ == "__main__":
+     app()
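
Since pyproject.toml wires this app to the molstar-preprocessor console script, the command can also be exercised in-process through Typer's test runner. A minimal sketch (example.map is a placeholder path, so a real run needs an actual MAP file):

    from typer.testing import CliRunner

    from volsegtools._cli import app

    runner = CliRunner()
    result = runner.invoke(app, ["--volume-source", "example.map", "--rm-tmp"])
    print(result.exit_code, result.output)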
@@ -0,0 +1,6 @@
+ from .converter import Converter
+ from .data_handle import DataHandle
+ from .downsampler import Downsampler
+ from .kernel import ConvolutionKernel
+ from .preprocessor import Preprocessor
+ from .serializer import Serializer
@@ -0,0 +1,69 @@
+ import abc
+ from pathlib import Path
+ from typing import Any
+
+ from volsegtools.abc.data_handle import DataHandle  # direct import avoids a circular import
+
+
+ class Converter(abc.ABC):
+     """Converts the contents of a supported file format into the internal
+     data structure that the rest of the processing pipeline works with.
+     """
+
+     @staticmethod
+     @abc.abstractmethod
+     async def transform_volume(input_path: Path) -> DataHandle:
+         """Transforms volumetric data into a zarr array.
+
+         Parameters
+         ----------
+         input_path: Path
+             Path to the transformation target.
+
+         Returns
+         -------
+         Lazy reference to the binary blob data.
+         """
+         ...
+
+     @staticmethod
+     @abc.abstractmethod
+     async def transform_segmentation(input_path: Path) -> DataHandle:
+         """Transforms segmentation data into a zarr array.
+
+         Parameters
+         ----------
+         input_path: Path
+             Path to the transformation target.
+         """
+         ...
+
+     @staticmethod
+     @abc.abstractmethod
+     async def collect_metadata(input_path: Path) -> Any:
+         """Collects metadata from a file.
+
+         Parameters
+         ----------
+         input_path: Path
+             Path to the file the metadata is collected from.
+         """
+         pass
+
+     @staticmethod
+     @abc.abstractmethod
+     async def collect_annotations(input_path: Path) -> Any:
+         """Collects annotations from a file.
+
+         Parameters
+         ----------
+         input_path: Path
+             Path to the file the annotations are collected from.
+         """
+         pass
@@ -0,0 +1,24 @@
+ import abc
+ from typing import Any
+
+ from numpy.typing import ArrayLike
+
+
+ class DataHandle(abc.ABC):
+     @abc.abstractmethod
+     def access(self) -> ArrayLike:
+         raise NotImplementedError()
+
+     @abc.abstractmethod
+     def unwrap(self, target: str) -> ArrayLike:
+         raise NotImplementedError()
+
+     @property
+     @abc.abstractmethod
+     def metadata(self) -> Any:
+         raise NotImplementedError()
+
+     @metadata.setter
+     @abc.abstractmethod
+     def metadata(self, new_metadata: Any) -> None:
+         raise NotImplementedError()
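
A minimal in-memory sketch of this interface (illustrative only; the package's concrete handle is OpaqueDataHandle in volsegtools/model/opaque_data_handle.py, whose body is not reproduced in this excerpt):

    from typing import Any

    import numpy as np

    from volsegtools.abc import DataHandle


    class InMemoryDataHandle(DataHandle):
        """Hypothetical handle that keeps the whole array in memory."""

        def __init__(self, array: np.ndarray):
            self._array = array
            self._metadata: Any = None

        def access(self) -> np.ndarray:
            return self._array

        def unwrap(self, target: str) -> np.ndarray:
            # Assumption: `target` names a stored representation; this toy
            # handle has only one, so the argument is ignored.
            return self._array

        @property
        def metadata(self) -> Any:
            return self._metadata

        @metadata.setter
        def metadata(self, new_metadata: Any) -> None:
            self._metadata = new_metadata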
@@ -0,0 +1,26 @@
+ import abc
+ from typing import Any, List
+
+ from volsegtools.abc.data_handle import DataHandle  # direct import avoids a circular import
+
+
+ class Downsampler(abc.ABC):
+     @property
+     @abc.abstractmethod
+     def parameters(self) -> Any: ...
+
+     @abc.abstractmethod
+     async def downsample_lattice(self, data: DataHandle) -> List[DataHandle]:
+         """Downsamples the provided lattice.
+
+         The lattice is changed in place and only a reference to the same data
+         array is returned.
+
+         Parameters
+         ----------
+         data: DataHandle
+             Handle to the lattice that should be downsampled.
+
+         Returns
+         -------
+         List[DataHandle]
+             Handles to the downsampled lattices, one per resolution level.
+         """
+         ...
@@ -0,0 +1,8 @@
+ import abc
+
+ import numpy as np
+
+
+ class ConvolutionKernel(abc.ABC):
+     @abc.abstractmethod
+     def as_ndarray(self) -> np.ndarray: ...
@@ -0,0 +1,38 @@
+ import abc
+ from typing import Any
+
+
+ class Preprocessor(abc.ABC):
+     """Drives the preprocessing pipeline: transforming volumes and
+     segmentations, collecting metadata and annotations, and downsampling
+     the results.
+     """
+
+     @abc.abstractmethod
+     async def transform_volume(self):
+         pass
+
+     @abc.abstractmethod
+     async def transform_segmentation(self):
+         pass
+
+     @abc.abstractmethod
+     async def collect_metadata(self):
+         pass
+
+     @abc.abstractmethod
+     async def collect_annotation(self):
+         pass
+
+     @abc.abstractmethod
+     async def downsample(self):
+         pass
+
+     @abc.abstractmethod
+     async def get_results(self) -> Any:
+         pass
+
+     @abc.abstractmethod
+     async def preprocess(self):
+         pass
+
+     @abc.abstractmethod
+     def sync_preprocess(self):
+         pass
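
The interface pairs the async preprocess with a blocking sync_preprocess, which is what the Typer command above calls. A self-contained sketch of the sync-over-async bridge a concrete subclass would plausibly use (an assumption; the real implementation in volsegtools/preprocessor/preprocessor.py is not reproduced in this excerpt):

    import asyncio


    async def preprocess() -> str:
        # Stand-in for Preprocessor.preprocess().
        await asyncio.sleep(0)
        return "done"


    def sync_preprocess() -> str:
        # Run the async pipeline to completion from synchronous code.
        return asyncio.run(preprocess())


    print(sync_preprocess())  # done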
@@ -0,0 +1,12 @@
+ import abc
+ from pathlib import Path
+
+ from volsegtools.abc.data_handle import DataHandle
+
+
+ class Serializer(abc.ABC):
+     """Serializes the provided array-like data into some data format."""
+
+     @staticmethod
+     @abc.abstractmethod
+     async def serialize(data: DataHandle, output_path: Path) -> None: ...
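
A hypothetical implementation sketch (NpySerializer is invented here for illustration; the package's real serializer is the BCIF one in volsegtools/serialization/bcif_serializer.py, not reproduced in this excerpt):

    from pathlib import Path

    import numpy as np

    from volsegtools.abc import DataHandle, Serializer


    class NpySerializer(Serializer):
        @staticmethod
        async def serialize(data: DataHandle, output_path: Path) -> None:
            # Materialize the handle's array and dump it as a .npy file.
            np.save(output_path, np.asarray(data.access()))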
@@ -0,0 +1 @@
+ from .map_converter import MapConverter
@@ -0,0 +1,148 @@
+ from pathlib import Path
+
+ import dask.array as da
+ import mrcfile
+ import numpy as np
+
+ from volsegtools.abc import Converter
+ from volsegtools.core import LatticeKind, Vector3
+ from volsegtools.model import StoringParameters, TimeFrameMetadata
+ from volsegtools.model.opaque_data_handle import OpaqueDataHandle
+ from volsegtools.model.working_store import WorkingStore
+
+
+ class MapConverter(Converter):
+     @staticmethod
+     def _normalize_axis_order(data: da.Array, header: np.recarray) -> da.Array:
+         """Normalizes the order of axes in the data to (x, y, z).
+
+         Because the data is stored in column order, the array has to be
+         transposed at the end.
+
+         Parameters
+         ----------
+         data: da.Array
+             MRC file data with any order of axes.
+         header: np.recarray
+             Header of the file the data comes from.
+
+         Returns
+         -------
+         da.Array
+             Array with normalized order of axes. It is a view of the input
+             array.
+         """
+         CORRECT_ORDER = (0, 1, 2)
+
+         current_order = tuple(
+             int(axis) - 1 for axis in [header.mapc, header.mapr, header.maps]
+         )
+
+         if current_order != CORRECT_ORDER:
+             data = da.moveaxis(data, current_order, CORRECT_ORDER)
+
+         return data.transpose()
+
+     @staticmethod
+     async def transform_volume(input_path: Path) -> OpaqueDataHandle:
+         with mrcfile.mmap(input_path, "r+") as mrc:
+             if mrc.data is None or mrc.header is None:
+                 raise RuntimeError("Failed to read data from MAP file")
+
+             array = da.from_array(mrc.data)
+             array = MapConverter._normalize_axis_order(array, mrc.header)
+             internal_data = WorkingStore.instance
+             internal_data.volume_dtype = mrc.data.dtype
+             internal_data.is_volume_dtype_set = True
+             volume_id: str = input_path.stem
+             return internal_data.store_lattice_time_frame(
+                 StoringParameters(), array, volume_id
+             )
+
+     @staticmethod
+     async def transform_segmentation(input_path: Path) -> OpaqueDataHandle:
+         with mrcfile.open(input_path, "r+") as mrc:
+             if mrc.data is None or mrc.header is None:
+                 raise RuntimeError("Failed to read data from MAP file")
+
+             data = da.from_array(mrc.data)
+             data = MapConverter._normalize_axis_order(data, mrc.header)
+
+             if np.issubdtype(data.dtype, np.floating):
+                 data = data.astype(np.byte)
+
+             internal_data = WorkingStore.instance
+
+             internal_data.volume_dtype = data.dtype
+             internal_data.is_volume_dtype_set = True
+
+             segmentation_id: str = input_path.stem
+
+             storing_params = StoringParameters()
+             storing_params.storage_dtype = data.dtype
+             storing_params.lattice_kind = LatticeKind.SEGMENTATION
+             return internal_data.store_lattice_time_frame(
+                 storing_params, data, segmentation_id
+             )
+
+     @staticmethod
+     async def collect_annotations(input_path: Path) -> None:
+         pass
+
+     @staticmethod
+     async def collect_metadata(input_path: Path) -> TimeFrameMetadata:
+         with mrcfile.open(input_path, "r+") as mrc:
+             if mrc.data is None or mrc.header is None:
+                 raise RuntimeError("Failed to read data from MAP file")
+             lattice_shape = Vector3(
+                 int(mrc.header.nx),
+                 int(mrc.header.ny),
+                 int(mrc.header.nz),
+             )
+             header = mrc.header
+
+             axis_order_map = {
+                 header.mapc - 1: 0,
+                 header.mapr - 1: 1,
+                 header.maps - 1: 2,
+             }
+
+             axis_order = Vector3(0, 1, 2)
+
+             start = (header.nxstart, header.nystart, header.nzstart)
+             start = Vector3(
+                 start[axis_order_map[0]],
+                 start[axis_order_map[1]],
+                 start[axis_order_map[2]],
+             )
+
+             original_voxel_size = Vector3(
+                 header.cella.x,
+                 header.cella.y,
+                 header.cella.z,
+             )
+
+             origin = Vector3(
+                 start.x * original_voxel_size.x,
+                 start.y * original_voxel_size.y,
+                 start.z * original_voxel_size.z,
+             )
+
+             original_time_frame_metadata = TimeFrameMetadata(
+                 axis_order=axis_order,
+                 lattice_id=input_path.stem,
+                 id=0,
+                 resolution=0,
+                 origin=origin,
+                 lattice_dimensions=lattice_shape,
+                 voxel_size=original_voxel_size,
+                 # As we are not converting the first resolution to BCIF, this
+                 # can be left empty.
+                 channels=[],
+             )
+
+             print("Original Metadata:", original_time_frame_metadata)
+
+             return original_time_frame_metadata
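
A quick self-contained illustration of the axis normalization above, with numpy instead of dask for brevity and hypothetical header values (mapc, mapr, maps are 1-based in the MRC header):

    import numpy as np

    # Hypothetical header: columns run along y, rows along x, sections along z.
    mapc, mapr, maps = 2, 1, 3
    data = np.arange(2 * 3 * 4).reshape(2, 3, 4)

    current_order = tuple(int(a) - 1 for a in (mapc, mapr, maps))  # (1, 0, 2)
    normalized = np.moveaxis(data, current_order, (0, 1, 2))

    print(data.shape, "->", normalized.shape)  # (2, 3, 4) -> (3, 2, 4)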
@@ -0,0 +1,5 @@
+ from .bounds import Bounds
+ from .downsampling_parameters import DownsamplingParameters, to_bytes
+ from .gaussian_kernel_3D import Gaussian3DKernel
+ from .lattice_kind import LatticeKind
+ from .vector import Vector3
@@ -0,0 +1,12 @@
+ from typing import Optional
+
+ import pydantic
+
+
+ # TODO: rather use a pydantic BaseModel...
+ @pydantic.dataclasses.dataclass
+ class Bounds:
+     # TODO: might be int
+     # TODO: this should be generic and accept any class that supports the required traits
+     min: Optional[pydantic.NonNegativeFloat] = 0.0
+     max: Optional[pydantic.NonNegativeFloat] = 0.0
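
Because Bounds is a pydantic dataclass, its fields are validated on construction; a brief sketch:

    import pydantic

    from volsegtools.core import Bounds

    b = Bounds(0.0, 10.0)  # fine
    try:
        Bounds(-1.0, 10.0)  # NonNegativeFloat rejects negative values
    except pydantic.ValidationError as e:
        print(e)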
@@ -0,0 +1,28 @@
+ from typing import Optional
+
+ import pydantic
+ from pydantic import ConfigDict
+ from pydantic.dataclasses import dataclass
+
+ from volsegtools.abc.kernel import ConvolutionKernel
+
+ # Importing from the submodules directly avoids a circular import while
+ # volsegtools/core/__init__.py is still loading.
+ from volsegtools.core.bounds import Bounds
+ from volsegtools.core.gaussian_kernel_3D import Gaussian3DKernel
+
+
+ def _minimal_size_bounds_factory():
+     return Bounds(5.0, None)
+
+
+ @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
+ class DownsamplingParameters:
+     downsampling_level_bounds: Optional[Bounds] = None
+     should_remove_original_resolution: bool = False
+     is_mask: bool = False
+     acceptance_threshold: Optional[pydantic.PositiveFloat] = None
+     kernel: ConvolutionKernel = Gaussian3DKernel(5, 1.0)
+     size_per_level_bounds_in_mb: Bounds = pydantic.Field(
+         default_factory=_minimal_size_bounds_factory
+     )
+
+
+ def to_bytes(megabytes: int):
+     return megabytes * 10**6
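
A short usage sketch (the concrete numbers are arbitrary example values):

    from volsegtools.core import Bounds, DownsamplingParameters, to_bytes

    params = DownsamplingParameters(
        downsampling_level_bounds=Bounds(1.0, 4.0),
        size_per_level_bounds_in_mb=Bounds(5.0, 512.0),
    )
    # to_bytes works in decimal megabytes.
    max_bytes = to_bytes(512)  # 512_000_000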
@@ -0,0 +1,16 @@
+ import numpy as np
+
+ from volsegtools.abc import ConvolutionKernel
+
+
+ class Gaussian3DKernel(ConvolutionKernel):
+     """A normalized 3D Gaussian convolution kernel."""
+
+     def __init__(self, size: int, sigma: float):
+         ax = np.linspace(-(size // 2), size // 2, size)
+         xx, yy, zz = np.meshgrid(ax, ax, ax, indexing="ij")
+         kernel = np.exp(-(xx**2 + yy**2 + zz**2) / (2.0 * sigma**2))
+         self._kernel_lattice = kernel / np.sum(kernel)
+
+     def as_ndarray(self) -> np.ndarray:
+         return self._kernel_lattice
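
The kernel is normalized so its weights sum to 1, which keeps downsampled intensities on the same scale as the input. A quick standalone check that mirrors the constructor above:

    import numpy as np

    size, sigma = 5, 1.0
    ax = np.linspace(-(size // 2), size // 2, size)
    xx, yy, zz = np.meshgrid(ax, ax, ax, indexing="ij")
    kernel = np.exp(-(xx**2 + yy**2 + zz**2) / (2.0 * sigma**2))
    kernel /= kernel.sum()

    assert kernel.shape == (5, 5, 5)
    assert np.isclose(kernel.sum(), 1.0)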
@@ -0,0 +1,8 @@
+ import enum
+
+
+ class LatticeKind(enum.Enum):
+     """Kind of data a lattice holds."""
+
+     VOLUME = 1
+     SEGMENTATION = 2
@@ -0,0 +1,9 @@
+ import dataclasses
+
+
+ # TODO: rather use pydantic
+ @dataclasses.dataclass
+ class Vector3:
+     x: float = 0
+     y: float = 0
+     z: float = 0
@@ -0,0 +1,2 @@
+ from .base_downsampler import BaseDownsampler
+ from .hierarchy_downsampler import HierarchyDownsampler
@@ -0,0 +1,20 @@
+ from typing import List
+
+ from volsegtools.abc.downsampler import Downsampler
+ from volsegtools.core.downsampling_parameters import DownsamplingParameters
+ from volsegtools.model.opaque_data_handle import OpaqueDataHandle
+
+
+ class BaseDownsampler(Downsampler):
+     # TODO: There shouldn't be any of this; this is just an interface...
+     def __init__(self, parameters: DownsamplingParameters):
+         self._parameters = parameters
+         self.data = None
+
+     @property
+     def parameters(self) -> DownsamplingParameters:
+         return self._parameters
+
+     async def downsample_lattice(
+         self, _: OpaqueDataHandle
+     ) -> List[OpaqueDataHandle]: ...
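
The HierarchyDownsampler body is not part of this excerpt; as a rough, illustrative sketch of the underlying technique (Gaussian smoothing followed by stride-2 subsampling, built only from dependencies declared in pyproject.toml), not the package's actual algorithm:

    import dask.array as da
    from dask_image.ndfilters import convolve

    from volsegtools.core import Gaussian3DKernel

    volume = da.random.random((64, 64, 64), chunks=32)
    kernel = Gaussian3DKernel(5, 1.0).as_ndarray()

    # Smooth with the Gaussian kernel, then keep every second voxel along
    # each axis to halve the resolution.
    smoothed = convolve(volume, kernel)
    downsampled = smoothed[::2, ::2, ::2].compute()

    print(downsampled.shape)  # (32, 32, 32)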