ngio-0.5.0b6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. ngio/__init__.py +69 -0
  2. ngio/common/__init__.py +28 -0
  3. ngio/common/_dimensions.py +335 -0
  4. ngio/common/_masking_roi.py +153 -0
  5. ngio/common/_pyramid.py +408 -0
  6. ngio/common/_roi.py +315 -0
  7. ngio/common/_synt_images_utils.py +101 -0
  8. ngio/common/_zoom.py +188 -0
  9. ngio/experimental/__init__.py +5 -0
  10. ngio/experimental/iterators/__init__.py +15 -0
  11. ngio/experimental/iterators/_abstract_iterator.py +390 -0
  12. ngio/experimental/iterators/_feature.py +189 -0
  13. ngio/experimental/iterators/_image_processing.py +130 -0
  14. ngio/experimental/iterators/_mappers.py +48 -0
  15. ngio/experimental/iterators/_rois_utils.py +126 -0
  16. ngio/experimental/iterators/_segmentation.py +235 -0
  17. ngio/hcs/__init__.py +19 -0
  18. ngio/hcs/_plate.py +1354 -0
  19. ngio/images/__init__.py +44 -0
  20. ngio/images/_abstract_image.py +967 -0
  21. ngio/images/_create_synt_container.py +132 -0
  22. ngio/images/_create_utils.py +423 -0
  23. ngio/images/_image.py +926 -0
  24. ngio/images/_label.py +411 -0
  25. ngio/images/_masked_image.py +531 -0
  26. ngio/images/_ome_zarr_container.py +1237 -0
  27. ngio/images/_table_ops.py +471 -0
  28. ngio/io_pipes/__init__.py +75 -0
  29. ngio/io_pipes/_io_pipes.py +361 -0
  30. ngio/io_pipes/_io_pipes_masked.py +488 -0
  31. ngio/io_pipes/_io_pipes_roi.py +146 -0
  32. ngio/io_pipes/_io_pipes_types.py +56 -0
  33. ngio/io_pipes/_match_shape.py +377 -0
  34. ngio/io_pipes/_ops_axes.py +344 -0
  35. ngio/io_pipes/_ops_slices.py +411 -0
  36. ngio/io_pipes/_ops_slices_utils.py +199 -0
  37. ngio/io_pipes/_ops_transforms.py +104 -0
  38. ngio/io_pipes/_zoom_transform.py +180 -0
  39. ngio/ome_zarr_meta/__init__.py +65 -0
  40. ngio/ome_zarr_meta/_meta_handlers.py +536 -0
  41. ngio/ome_zarr_meta/ngio_specs/__init__.py +77 -0
  42. ngio/ome_zarr_meta/ngio_specs/_axes.py +515 -0
  43. ngio/ome_zarr_meta/ngio_specs/_channels.py +462 -0
  44. ngio/ome_zarr_meta/ngio_specs/_dataset.py +89 -0
  45. ngio/ome_zarr_meta/ngio_specs/_ngio_hcs.py +539 -0
  46. ngio/ome_zarr_meta/ngio_specs/_ngio_image.py +438 -0
  47. ngio/ome_zarr_meta/ngio_specs/_pixel_size.py +122 -0
  48. ngio/ome_zarr_meta/v04/__init__.py +27 -0
  49. ngio/ome_zarr_meta/v04/_custom_models.py +18 -0
  50. ngio/ome_zarr_meta/v04/_v04_spec.py +473 -0
  51. ngio/ome_zarr_meta/v05/__init__.py +27 -0
  52. ngio/ome_zarr_meta/v05/_custom_models.py +18 -0
  53. ngio/ome_zarr_meta/v05/_v05_spec.py +511 -0
  54. ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/mask.png +0 -0
  55. ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/nuclei.png +0 -0
  56. ngio/resources/20200812-CardiomyocyteDifferentiation14-Cycle1_B03/raw.jpg +0 -0
  57. ngio/resources/__init__.py +55 -0
  58. ngio/resources/resource_model.py +36 -0
  59. ngio/tables/__init__.py +43 -0
  60. ngio/tables/_abstract_table.py +270 -0
  61. ngio/tables/_tables_container.py +449 -0
  62. ngio/tables/backends/__init__.py +57 -0
  63. ngio/tables/backends/_abstract_backend.py +240 -0
  64. ngio/tables/backends/_anndata.py +139 -0
  65. ngio/tables/backends/_anndata_utils.py +90 -0
  66. ngio/tables/backends/_csv.py +19 -0
  67. ngio/tables/backends/_json.py +92 -0
  68. ngio/tables/backends/_parquet.py +19 -0
  69. ngio/tables/backends/_py_arrow_backends.py +222 -0
  70. ngio/tables/backends/_table_backends.py +226 -0
  71. ngio/tables/backends/_utils.py +608 -0
  72. ngio/tables/v1/__init__.py +23 -0
  73. ngio/tables/v1/_condition_table.py +71 -0
  74. ngio/tables/v1/_feature_table.py +125 -0
  75. ngio/tables/v1/_generic_table.py +49 -0
  76. ngio/tables/v1/_roi_table.py +575 -0
  77. ngio/transforms/__init__.py +5 -0
  78. ngio/transforms/_zoom.py +19 -0
  79. ngio/utils/__init__.py +45 -0
  80. ngio/utils/_cache.py +48 -0
  81. ngio/utils/_datasets.py +165 -0
  82. ngio/utils/_errors.py +37 -0
  83. ngio/utils/_fractal_fsspec_store.py +42 -0
  84. ngio/utils/_zarr_utils.py +534 -0
  85. ngio-0.5.0b6.dist-info/METADATA +148 -0
  86. ngio-0.5.0b6.dist-info/RECORD +88 -0
  87. ngio-0.5.0b6.dist-info/WHEEL +4 -0
  88. ngio-0.5.0b6.dist-info/licenses/LICENSE +28 -0
ngio/images/_table_ops.py
@@ -0,0 +1,471 @@
+ """Aggregation and filtering operations for tables."""
+
+ import asyncio
+ from collections import Counter
+ from collections.abc import Sequence
+ from dataclasses import dataclass, field
+ from typing import Literal
+
+ import pandas as pd
+ import polars as pl
+
+ from ngio.images._ome_zarr_container import OmeZarrContainer
+ from ngio.tables import Table, TableType
+
+
+ @dataclass
+ class TableWithExtras:
+     """A class to hold a table and its extras."""
+
+     table: Table
+     extras: dict[str, str] = field(default_factory=dict)
+
+
+ def _reindex_dataframe(
+     dataframe, index_cols: list[str], index_key: str | None = None
+ ) -> pd.DataFrame:
+     """Reindex a dataframe using a hash of the index columns."""
+     # Reindex the dataframe
+     old_index = dataframe.index.name
+     if old_index is not None:
+         dataframe = dataframe.reset_index()
+         index_cols.append(old_index)
+     dataframe.index = dataframe[index_cols].astype(str).agg("_".join, axis=1)
+
+     if index_key is None:
+         dataframe.index.name = index_key
+     return dataframe
+
+
+ def _add_const_columns(
+     dataframe: pd.DataFrame,
+     new_cols: dict[str, str],
+     index_key: str | None = None,
+ ) -> pd.DataFrame:
+     for col, value in new_cols.items():
+         dataframe[col] = value
+
+     if index_key is not None:
+         dataframe = _reindex_dataframe(
+             dataframe=dataframe,
+             index_cols=list(new_cols.keys()),
+             index_key=index_key,
+         )
+     return dataframe
+
+
+ def _add_const_columns_pl(
+     dataframe: pl.LazyFrame,
+     new_cols: dict[str, str],
+     index_key: str | None = None,
+ ) -> pl.LazyFrame:
+     dataframe = dataframe.with_columns(
+         [pl.lit(value, dtype=pl.String()).alias(col) for col, value in new_cols.items()]
+     )
+
+     if index_key is not None:
+         dataframe = dataframe.with_columns(
+             [
+                 pl.concat_str(
+                     [pl.col(col) for col in new_cols.keys()],
+                     separator="_",
+                 ).alias(index_key)
+             ]
+         )
+     return dataframe
+
+
+ def _pd_concat(
+     tables: Sequence[TableWithExtras], index_key: str | None = None
+ ) -> pd.DataFrame:
+     """Concatenate tables from different plates into a single table."""
+     if len(tables) == 0:
+         raise ValueError("No tables to concatenate.")
+
+     dataframes = []
+     for table in tables:
+         dataframe = _add_const_columns(
+             dataframe=table.table.dataframe, new_cols=table.extras, index_key=index_key
+         )
+         dataframes.append(dataframe)
+     concatenated_table = pd.concat(dataframes, axis=0)
+     return concatenated_table
+
+
+ def _pl_concat(
+     tables: Sequence[TableWithExtras], index_key: str | None = None
+ ) -> pl.LazyFrame:
+     """Concatenate tables from different plates into a single table."""
+     if len(tables) == 0:
+         raise ValueError("No tables to concatenate.")
+
+     dataframes = []
+     for table in tables:
+         polars_ls = _add_const_columns_pl(
+             dataframe=table.table.lazy_frame,
+             new_cols=table.extras,
+             index_key=index_key,
+         )
+         dataframes.append(polars_ls)
+
+     concatenated_table = pl.concat(dataframes, how="vertical")
+     return concatenated_table
+
+
+ def conctatenate_tables(
+     tables: Sequence[TableWithExtras],
+     mode: Literal["eager", "lazy"] = "eager",
+     index_key: str | None = None,
+     table_cls: type[TableType] | None = None,
+ ) -> Table:
+     """Concatenate tables from different plates into a single table."""
+     if len(tables) == 0:
+         raise ValueError("No tables to concatenate.")
+
+     table0 = next(iter(tables)).table
+
+     if mode == "lazy":
+         concatenated_table = _pl_concat(tables=tables, index_key=index_key)
+     elif mode == "eager":
+         concatenated_table = _pd_concat(tables=tables, index_key=index_key)
+     else:
+         raise ValueError(f"Unknown mode: {mode}. Use 'eager' or 'lazy'.")
+
+     meta = table0.meta
+     meta.index_key = index_key
+     meta.index_type = "str"
+
+     if table_cls is not None:
+         return table_cls.from_table_data(
+             table_data=concatenated_table,
+             meta=meta,
+         )
+     return table0.from_table_data(
+         table_data=concatenated_table,
+         meta=meta,
+     )
+
+
+ def _check_images_and_extras(
+     images: Sequence[OmeZarrContainer],
+     extras: Sequence[dict[str, str]],
+ ) -> None:
+     """Check if the images and extras are valid."""
+     if len(images) == 0:
+         raise ValueError("No images to concatenate.")
+
+     if len(images) != len(extras):
+         raise ValueError("The number of images and extras must be the same.")
+
+
+ def _concatenate_image_tables(
+     images: Sequence[OmeZarrContainer],
+     extras: Sequence[dict[str, str]],
+     name: str,
+     table_cls: type[TableType] | None = None,
+     index_key: str | None = None,
+     strict: bool = True,
+     mode: Literal["eager", "lazy"] = "eager",
+ ) -> Table:
+     """Concatenate tables from different images into a single table."""
+     _check_images_and_extras(images=images, extras=extras)
+
+     tables = []
+     for image, extra in zip(images, extras, strict=True):
+         if not strict and name not in image.list_tables():
+             continue
+         table = image.get_table(name)
+         tables.append(TableWithExtras(table=table, extras=extra))
+
+     return conctatenate_tables(
+         tables=tables,
+         mode=mode,
+         index_key=index_key,
+         table_cls=table_cls,
+     )
+
+
+ def concatenate_image_tables(
+     images: Sequence[OmeZarrContainer],
+     extras: Sequence[dict[str, str]],
+     name: str,
+     index_key: str | None = None,
+     strict: bool = True,
+     mode: Literal["eager", "lazy"] = "eager",
+ ) -> Table:
+     """Concatenate tables from different images into a single table.
+
+     Args:
+         images: A collection of images.
+         extras: A collection of extras dictionaries for each image.
+             These will be added as columns to the table and will be
+             concatenated with the table index to create a new index.
+         name: The name of the table to concatenate.
+         index_key: The key to use for the index of the concatenated table.
+         strict: If True, raise an error if the table is not found in the image.
+         mode: The mode to use for concatenation. Can be 'eager' or 'lazy'.
+             If 'eager', the table will be loaded into memory.
+             If 'lazy', the table will be loaded as a lazy frame.
+     """
+     return _concatenate_image_tables(
+         images=images,
+         extras=extras,
+         name=name,
+         table_cls=None,
+         index_key=index_key,
+         strict=strict,
+         mode=mode,
+     )
+
+
+ def concatenate_image_tables_as(
+     images: Sequence[OmeZarrContainer],
+     extras: Sequence[dict[str, str]],
+     name: str,
+     table_cls: type[TableType],
+     index_key: str | None = None,
+     strict: bool = True,
+     mode: Literal["eager", "lazy"] = "eager",
+ ) -> TableType:
+     """Concatenate tables from different images into a single table.
+
+     Args:
+         images: A collection of images.
+         extras: A collection of extras dictionaries for each image.
+             These will be added as columns to the table and will be
+             concatenated with the table index to create a new index.
+         name: The name of the table to concatenate.
+         table_cls: The output will be cast to this class, if the new table_cls is
+             compatible with the table_cls of the input tables.
+         index_key: The key to use for the index of the concatenated table.
+         strict: If True, raise an error if the table is not found in the image.
+         mode: The mode to use for concatenation. Can be 'eager' or 'lazy'.
+             If 'eager', the table will be loaded into memory.
+             If 'lazy', the table will be loaded as a lazy frame.
+     """
+     table = _concatenate_image_tables(
+         images=images,
+         extras=extras,
+         name=name,
+         table_cls=table_cls,
+         index_key=index_key,
+         strict=strict,
+         mode=mode,
+     )
+     if not isinstance(table, table_cls):
+         raise ValueError(f"Table is not of type {table_cls}. Got {type(table)}")
+     return table
+
+
+ async def _concatenate_image_tables_async(
+     images: Sequence[OmeZarrContainer],
+     extras: Sequence[dict[str, str]],
+     name: str,
+     table_cls: type[TableType] | None = None,
+     index_key: str | None = None,
+     strict: bool = True,
+     mode: Literal["eager", "lazy"] = "eager",
+ ) -> Table:
+     """Concatenate tables from different images into a single table."""
+     _check_images_and_extras(images=images, extras=extras)
+
+     def process_image(
+         image: OmeZarrContainer,
+         name: str,
+         extra: dict[str, str],
+         mode: Literal["eager", "lazy"] = "eager",
+         strict: bool = True,
+     ) -> TableWithExtras | None:
+         """Process a single image and return the table."""
+         if not strict and name not in image.list_tables():
+             return None
+         _table = image.get_table(name)
+         if mode == "lazy":
+             # make sure the table is loaded lazily
+             # If the backend is not lazy, this will be
+             # loaded eagerly
+             _ = _table.lazy_frame
+         elif mode == "eager":
+             # make sure the table is loaded eagerly
+             _ = _table.dataframe
+         table = TableWithExtras(
+             table=_table,
+             extras=extra,
+         )
+         return table
+
+     tasks = []
+     for image, extra in zip(images, extras, strict=True):
+         task = asyncio.to_thread(
+             process_image,
+             image=image,
+             name=name,
+             extra=extra,
+             strict=strict,
+         )
+         tasks.append(task)
+     tables = await asyncio.gather(*tasks)
+     tables = [table for table in tables if table is not None]
+     return conctatenate_tables(
+         tables=tables,
+         mode=mode,
+         index_key=index_key,
+         table_cls=table_cls,
+     )
+
+
+ async def concatenate_image_tables_async(
+     images: Sequence[OmeZarrContainer],
+     extras: Sequence[dict[str, str]],
+     name: str,
+     index_key: str | None = None,
+     strict: bool = True,
+     mode: Literal["eager", "lazy"] = "eager",
+ ) -> Table:
+     """Concatenate tables from different images into a single table.
+
+     Args:
+         images: A collection of images.
+         extras: A collection of extras dictionaries for each image.
+             These will be added as columns to the table and will be
+             concatenated with the table index to create a new index.
+         name: The name of the table to concatenate.
+         index_key: The key to use for the index of the concatenated table.
+         strict: If True, raise an error if the table is not found in the image.
+         mode: The mode to use for concatenation. Can be 'eager' or 'lazy'.
+             If 'eager', the table will be loaded into memory.
+             If 'lazy', the table will be loaded as a lazy frame.
+     """
+     return await _concatenate_image_tables_async(
+         images=images,
+         extras=extras,
+         name=name,
+         table_cls=None,
+         index_key=index_key,
+         strict=strict,
+         mode=mode,
+     )
+
+
+ async def concatenate_image_tables_as_async(
+     images: Sequence[OmeZarrContainer],
+     extras: Sequence[dict[str, str]],
+     name: str,
+     table_cls: type[TableType],
+     index_key: str | None = None,
+     strict: bool = True,
+     mode: Literal["eager", "lazy"] = "eager",
+ ) -> TableType:
+     """Concatenate tables from different images into a single table.
+
+     Args:
+         images: A collection of images.
+         extras: A collection of extras dictionaries for each image.
+             These will be added as columns to the table and will be
+             concatenated with the table index to create a new index.
+         name: The name of the table to concatenate.
+         table_cls: The output will be cast to this class, if the new table_cls is
+             compatible with the table_cls of the input tables.
+         index_key: The key to use for the index of the concatenated table.
+         strict: If True, raise an error if the table is not found in the image.
+         mode: The mode to use for concatenation. Can be 'eager' or 'lazy'.
+             If 'eager', the table will be loaded into memory.
+             If 'lazy', the table will be loaded as a lazy frame.
+     """
+     table = await _concatenate_image_tables_async(
+         images=images,
+         extras=extras,
+         name=name,
+         table_cls=table_cls,
+         index_key=index_key,
+         strict=strict,
+         mode=mode,
+     )
+     if not isinstance(table, table_cls):
+         raise ValueError(f"Table is not of type {table_cls}. Got {type(table)}")
+     return table
+
+
+ def _tables_names_coalesce(
+     tables_names: list[list[str]],
+     mode: Literal["common", "all"] = "common",
+ ) -> list[str]:
+     num_images = len(tables_names)
+     if num_images == 0:
+         raise ValueError("No images to concatenate.")
+
+     names = [name for _names in tables_names for name in _names]
+     names_counts = Counter(names)
+
+     if mode == "common":
+         # Get the names that are present in all images
+         common_names = [
+             name for name, count in names_counts.items() if count == num_images
+         ]
+         return common_names
+     elif mode == "all":
+         # Get all names
+         return list(names_counts.keys())
+     else:
+         raise ValueError(f"Unknown mode: {mode}. Use 'common' or 'all'.")
+
+
+ def list_image_tables(
+     images: Sequence[OmeZarrContainer],
+     filter_types: str | None = None,
+     mode: Literal["common", "all"] = "common",
+ ) -> list[str]:
+     """List all table names in the images.
+
+     Args:
+         images: A collection of images.
+         filter_types (str | None): The type of tables to filter. If None,
+             return all tables. Defaults to None.
+         mode (Literal["common", "all"]): Whether to return only common tables
+             between all images or all tables. Defaults to "common".
+     """
+     tables_names = []
+     for image in images:
+         tables = image.list_tables(filter_types=filter_types)
+         tables_names.append(tables)
+
+     return _tables_names_coalesce(
+         tables_names=tables_names,
+         mode=mode,
+     )
+
+
+ async def list_image_tables_async(
+     images: Sequence[OmeZarrContainer],
+     filter_types: str | None = None,
+     mode: Literal["common", "all"] = "common",
+ ) -> list[str]:
+     """List all table names in the images asynchronously.
+
+     Args:
+         images: A collection of images.
+         filter_types (str | None): The type of tables to filter. If None,
+             return all tables. Defaults to None.
+         mode (Literal["common", "all"]): Whether to return only common tables
+             between all images or all tables. Defaults to "common".
+     """
+     images_ids = []
+
+     # key table name, value list of paths
+     def process_image(
+         image: OmeZarrContainer, filter_types: str | None = None
+     ) -> list[str]:
+         tables = image.list_tables(filter_types=filter_types)
+         return tables
+
+     tasks = []
+     for i, image in enumerate(images):
+         images_ids.append(i)
+         task = asyncio.to_thread(process_image, image, filter_types=filter_types)
+         tasks.append(task)
+
+     tables_names = await asyncio.gather(*tasks)
+     return _tables_names_coalesce(
+         tables_names=tables_names,
+         mode=mode,
+     )
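
For orientation, here is a minimal usage sketch of the helpers added in this module. The import path follows the file shown in this hunk; the open_ome_zarr_container opener, the image paths, the "regionprops" table name, and the extras values are illustrative assumptions rather than part of this diff:

from ngio import open_ome_zarr_container  # assumed top-level opener
from ngio.images._table_ops import concatenate_image_tables, list_image_tables

# Two hypothetical OME-Zarr images that both carry the same feature table.
images = [
    open_ome_zarr_container("plate.zarr/A/01/0"),
    open_ome_zarr_container("plate.zarr/A/02/0"),
]

# Table names shared by all images (mode="common").
print(list_image_tables(images, mode="common"))

# One extras dict per image: its values become constant string columns and are
# joined with the original index to build the new index named by index_key.
extras = [{"row": "A", "column": "01"}, {"row": "A", "column": "02"}]
merged = concatenate_image_tables(
    images=images,
    extras=extras,
    name="regionprops",     # illustrative table name
    index_key="image_roi",  # name of the index of the concatenated table
    mode="eager",           # pandas in memory; "lazy" concatenates polars LazyFrames
)
print(merged.dataframe.head())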
ngio/io_pipes/__init__.py
@@ -0,0 +1,75 @@
+ """I/O pipes for reading and writing data from zarr to numpy and dask arrays.
+
+ There are 3 main types of I/O pipes:
+ - Standard I/O pipes: NumpyGetter, NumpySetter, DaskGetter, DaskSetter:
+   These pipes read and write data using simple integer indexing and slicing.
+ - ROI I/O pipes: NumpyRoiGetter, NumpyRoiSetter, DaskRoiGetter, DaskRoiSetter:
+   These pipes read and write data from a region of interest (ROI) defined in physical
+   coordinates.
+ - Masked I/O pipes: NumpyGetterMasked, NumpySetterMasked, DaskGetterMasked,
+   DaskSetterMasked: These pipes, like the ROI pipes, read and write data
+   from a region of interest (ROI). However, they also load a boolean mask
+   from a label zarr array to mask the data being read or written.
+
+ All the I/O pipes are structured in the same way.
+
+ When reading data, the order of operations is:
+ - Step 1: Slice the zarr array to load only the data needed into memory.
+ - Step 2: Apply axes operations to reorder, squeeze or expand the axes
+   to match the user's desired axes order.
+ - Step 3: Apply any additional transforms to the data.
+
+ When writing data, the order of operations is reversed.
+
+ Transforms must implement the TransformProtocol.
+ They should be stateless and only depend on the input array and the slicing
+ and axes ops. This allows them to be easily reused between different I/O pipes.
+
+ """
+
+ from ngio.io_pipes._io_pipes import (
+     DaskGetter,
+     DaskSetter,
+     DataGetter,
+     DataSetter,
+     NumpyGetter,
+     NumpySetter,
+ )
+ from ngio.io_pipes._io_pipes_masked import (
+     DaskGetterMasked,
+     DaskSetterMasked,
+     NumpyGetterMasked,
+     NumpySetterMasked,
+ )
+ from ngio.io_pipes._io_pipes_roi import (
+     DaskRoiGetter,
+     DaskRoiSetter,
+     NumpyRoiGetter,
+     NumpyRoiSetter,
+ )
+ from ngio.io_pipes._match_shape import dask_match_shape, numpy_match_shape
+ from ngio.io_pipes._ops_slices import SlicingInputType, SlicingOps, SlicingType
+ from ngio.io_pipes._ops_transforms import TransformProtocol
+
+ __all__ = [
+     "DaskGetter",
+     "DaskGetterMasked",
+     "DaskRoiGetter",
+     "DaskRoiSetter",
+     "DaskSetter",
+     "DaskSetterMasked",
+     "DataGetter",
+     "DataSetter",
+     "NumpyGetter",
+     "NumpyGetterMasked",
+     "NumpyRoiGetter",
+     "NumpyRoiSetter",
+     "NumpySetter",
+     "NumpySetterMasked",
+     "SlicingInputType",
+     "SlicingOps",
+     "SlicingType",
+     "TransformProtocol",
+     "dask_match_shape",
+     "numpy_match_shape",
+ ]
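
To make the read-side order of operations described in the module docstring concrete, here is a small, self-contained sketch using plain NumPy and Zarr. The array shape, the slice, and the flip used as a stand-in transform are illustrative assumptions; this is not the API of the pipe classes above:

import numpy as np
import zarr

# Illustrative zarr array standing in for one resolution level with axes (t, c, z, y, x).
arr = zarr.zeros((1, 3, 8, 256, 256), chunks=(1, 1, 1, 128, 128), dtype="uint16")

# Step 1: slice the zarr array so only the needed region is loaded into memory.
data = arr[0, 1, :, 64:192, 64:192]  # numpy array of shape (8, 128, 128), axes (z, y, x)

# Step 2: axes operations to match the axes order the caller asked for, e.g. (y, x, z).
data = np.moveaxis(data, 0, -1)  # shape (128, 128, 8)

# Step 3: apply any additional (stateless) transform to the data.
data = np.flip(data, axis=0)

# Writing goes through the same steps in reverse: invert the transform, restore the
# on-disk axes order, then assign the block back into the zarr slice.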