careamics 0.0.14__py3-none-any.whl → 0.0.16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- careamics/careamist.py +55 -61
- careamics/cli/conf.py +24 -9
- careamics/cli/main.py +8 -8
- careamics/cli/utils.py +2 -4
- careamics/config/__init__.py +8 -0
- careamics/config/algorithms/__init__.py +4 -0
- careamics/config/algorithms/hdn_algorithm_model.py +103 -0
- careamics/config/algorithms/microsplit_algorithm_model.py +103 -0
- careamics/config/algorithms/n2v_algorithm_model.py +1 -2
- careamics/config/algorithms/vae_algorithm_model.py +53 -18
- careamics/config/architectures/lvae_model.py +12 -8
- careamics/config/callback_model.py +15 -11
- careamics/config/configuration.py +9 -8
- careamics/config/configuration_factories.py +892 -78
- careamics/config/data/data_model.py +7 -14
- careamics/config/data/ng_data_model.py +8 -15
- careamics/config/data/patching_strategies/_overlapping_patched_model.py +4 -5
- careamics/config/inference_model.py +6 -11
- careamics/config/likelihood_model.py +4 -4
- careamics/config/loss_model.py +6 -2
- careamics/config/nm_model.py +30 -7
- careamics/config/optimizer_models.py +1 -2
- careamics/config/support/supported_algorithms.py +5 -3
- careamics/config/support/supported_losses.py +5 -2
- careamics/config/training_model.py +8 -38
- careamics/config/transformations/normalize_model.py +3 -4
- careamics/config/transformations/xy_flip_model.py +2 -2
- careamics/config/transformations/xy_random_rotate90_model.py +2 -2
- careamics/config/validators/validator_utils.py +1 -2
- careamics/dataset/dataset_utils/iterate_over_files.py +3 -3
- careamics/dataset/in_memory_dataset.py +2 -2
- careamics/dataset/iterable_dataset.py +1 -2
- careamics/dataset/patching/random_patching.py +6 -6
- careamics/dataset/patching/sequential_patching.py +4 -4
- careamics/dataset/tiling/lvae_tiled_patching.py +2 -2
- careamics/dataset_ng/dataset.py +3 -3
- careamics/dataset_ng/factory.py +19 -19
- careamics/dataset_ng/patch_extractor/demo_custom_image_stack_loader.py +4 -4
- careamics/dataset_ng/patch_extractor/image_stack/in_memory_image_stack.py +1 -2
- careamics/dataset_ng/patch_extractor/image_stack/zarr_image_stack.py +33 -7
- careamics/dataset_ng/patch_extractor/image_stack_loader.py +2 -2
- careamics/dataset_ng/patching_strategies/random_patching.py +2 -3
- careamics/dataset_ng/patching_strategies/sequential_patching.py +1 -2
- careamics/file_io/read/__init__.py +0 -1
- careamics/lightning/__init__.py +16 -2
- careamics/lightning/callbacks/__init__.py +2 -0
- careamics/lightning/callbacks/data_stats_callback.py +23 -0
- careamics/lightning/callbacks/prediction_writer_callback/prediction_writer_callback.py +5 -5
- careamics/lightning/callbacks/prediction_writer_callback/write_strategy.py +5 -5
- careamics/lightning/callbacks/prediction_writer_callback/write_strategy_factory.py +8 -8
- careamics/lightning/dataset_ng/data_module.py +43 -43
- careamics/lightning/lightning_module.py +166 -68
- careamics/lightning/microsplit_data_module.py +631 -0
- careamics/lightning/predict_data_module.py +16 -9
- careamics/lightning/train_data_module.py +29 -18
- careamics/losses/__init__.py +7 -1
- careamics/losses/loss_factory.py +9 -1
- careamics/losses/lvae/losses.py +94 -9
- careamics/lvae_training/dataset/__init__.py +8 -8
- careamics/lvae_training/dataset/config.py +56 -44
- careamics/lvae_training/dataset/lc_dataset.py +18 -12
- careamics/lvae_training/dataset/ms_dataset_ref.py +5 -5
- careamics/lvae_training/dataset/multich_dataset.py +24 -18
- careamics/lvae_training/dataset/multifile_dataset.py +6 -6
- careamics/model_io/bioimage/model_description.py +12 -11
- careamics/model_io/bmz_io.py +12 -8
- careamics/models/layers.py +5 -5
- careamics/models/lvae/likelihoods.py +30 -14
- careamics/models/lvae/lvae.py +2 -2
- careamics/models/lvae/noise_models.py +20 -14
- careamics/prediction_utils/__init__.py +8 -2
- careamics/prediction_utils/lvae_prediction.py +5 -5
- careamics/prediction_utils/prediction_outputs.py +48 -3
- careamics/prediction_utils/stitch_prediction.py +71 -0
- careamics/transforms/compose.py +9 -9
- careamics/transforms/n2v_manipulate.py +3 -3
- careamics/transforms/n2v_manipulate_torch.py +4 -4
- careamics/transforms/normalize.py +4 -6
- careamics/transforms/pixel_manipulation.py +6 -8
- careamics/transforms/pixel_manipulation_torch.py +5 -7
- careamics/transforms/xy_flip.py +3 -5
- careamics/transforms/xy_random_rotate90.py +4 -6
- careamics/utils/logging.py +8 -8
- careamics/utils/metrics.py +2 -2
- careamics/utils/plotting.py +1 -3
- {careamics-0.0.14.dist-info → careamics-0.0.16.dist-info}/METADATA +18 -16
- {careamics-0.0.14.dist-info → careamics-0.0.16.dist-info}/RECORD +90 -88
- careamics/dataset/zarr_dataset.py +0 -151
- careamics/file_io/read/zarr.py +0 -60
- {careamics-0.0.14.dist-info → careamics-0.0.16.dist-info}/WHEEL +0 -0
- {careamics-0.0.14.dist-info → careamics-0.0.16.dist-info}/entry_points.txt +0 -0
- {careamics-0.0.14.dist-info → careamics-0.0.16.dist-info}/licenses/LICENSE +0 -0
Note: the registry viewer truncates long lines on the old (removed) side; removed lines in the diffs below are reproduced as displayed and may end mid-statement.

careamics/careamist.py
CHANGED

@@ -2,7 +2,7 @@
 
 from collections.abc import Callable
 from pathlib import Path
-from typing import Any, Literal,
+from typing import Any, Literal, Union, overload
 
 import numpy as np
 from numpy.typing import NDArray
@@ -79,8 +79,8 @@ class CAREamist:
     def __init__(  # numpydoc ignore=GL08
         self,
         source: Union[Path, str],
-        work_dir:
-        callbacks:
+        work_dir: Union[Path, str] | None = None,
+        callbacks: list[Callback] | None = None,
         enable_progress_bar: bool = True,
     ) -> None: ...
 
@@ -88,16 +88,16 @@ class CAREamist:
     def __init__(  # numpydoc ignore=GL08
         self,
         source: Configuration,
-        work_dir:
-        callbacks:
+        work_dir: Union[Path, str] | None = None,
+        callbacks: list[Callback] | None = None,
         enable_progress_bar: bool = True,
     ) -> None: ...
 
     def __init__(
         self,
         source: Union[Path, str, Configuration],
-        work_dir:
-        callbacks:
+        work_dir: Union[Path, str] | None = None,
+        callbacks: list[Callback] | None = None,
         enable_progress_bar: bool = True,
     ) -> None:
         """
@@ -208,25 +208,19 @@ class CAREamist:
 
         # instantiate trainer
         self.trainer = Trainer(
-            max_epochs=self.cfg.training_config.num_epochs,
-            precision=self.cfg.training_config.precision,
-            max_steps=self.cfg.training_config.max_steps,
-            check_val_every_n_epoch=self.cfg.training_config.check_val_every_n_epoch,
             enable_progress_bar=enable_progress_bar,
-            accumulate_grad_batches=self.cfg.training_config.accumulate_grad_batches,
-            gradient_clip_val=self.cfg.training_config.gradient_clip_val,
-            gradient_clip_algorithm=self.cfg.training_config.gradient_clip_algorithm,
             callbacks=self.callbacks,
             default_root_dir=self.work_dir,
             logger=experiment_logger,
+            **self.cfg.training_config.lightning_trainer_config or {},
         )
 
         # place holder for the datamodules
-        self.train_datamodule:
-        self.pred_datamodule:
+        self.train_datamodule: TrainDataModule | None = None
+        self.pred_datamodule: PredictDataModule | None = None
 
     def _define_callbacks(
-        self, callbacks:
+        self, callbacks: list[Callback] | None, enable_progress_bar: bool
     ) -> None:
         """Define the callbacks for the training loop.
 
@@ -264,7 +258,7 @@ class CAREamist:
             HyperParametersCallback(self.cfg),
             ModelCheckpoint(
                 dirpath=self.work_dir / Path("checkpoints"),
-                filename=self.cfg.experiment_name,
+                filename=f"{self.cfg.experiment_name}_{{epoch:02d}}_step_{{step}}",
                 **self.cfg.training_config.checkpoint_callback.model_dump(),
             ),
         ]
@@ -288,11 +282,11 @@ class CAREamist:
     def train(
         self,
         *,
-        datamodule:
-        train_source:
-        val_source:
-        train_target:
-        val_target:
+        datamodule: TrainDataModule | None = None,
+        train_source: Union[Path, str, NDArray] | None = None,
+        val_source: Union[Path, str, NDArray] | None = None,
+        train_target: Union[Path, str, NDArray] | None = None,
+        val_target: Union[Path, str, NDArray] | None = None,
         use_in_memory: bool = True,
         val_percentage: float = 0.1,
         val_minimum_split: int = 1,
@@ -443,9 +437,9 @@ class CAREamist:
     def _train_on_array(
         self,
         train_data: NDArray,
-        val_data:
-        train_target:
-        val_target:
+        val_data: NDArray | None = None,
+        train_target: NDArray | None = None,
+        val_target: NDArray | None = None,
         val_percentage: float = 0.1,
         val_minimum_split: int = 5,
     ) -> None:
@@ -484,9 +478,9 @@ class CAREamist:
     def _train_on_path(
         self,
         path_to_train_data: Union[Path, str],
-        path_to_val_data:
-        path_to_train_target:
-        path_to_val_target:
+        path_to_val_data: Union[Path, str] | None = None,
+        path_to_train_target: Union[Path, str] | None = None,
+        path_to_val_target: Union[Path, str] | None = None,
         use_in_memory: bool = True,
         val_percentage: float = 0.1,
         val_minimum_split: int = 1,
@@ -549,13 +543,13 @@ class CAREamist:
         source: Union[Path, str],
         *,
         batch_size: int = 1,
-        tile_size:
-        tile_overlap:
-        axes:
-        data_type:
+        tile_size: tuple[int, ...] | None = None,
+        tile_overlap: tuple[int, ...] | None = (48, 48),
+        axes: str | None = None,
+        data_type: Literal["tiff", "custom"] | None = None,
         tta_transforms: bool = False,
-        dataloader_params:
-        read_source_func:
+        dataloader_params: dict | None = None,
+        read_source_func: Callable | None = None,
         extension_filter: str = "",
     ) -> Union[list[NDArray], NDArray]: ...
 
@@ -565,12 +559,12 @@ class CAREamist:
         source: NDArray,
         *,
         batch_size: int = 1,
-        tile_size:
-        tile_overlap:
-        axes:
-        data_type:
+        tile_size: tuple[int, ...] | None = None,
+        tile_overlap: tuple[int, ...] | None = (48, 48),
+        axes: str | None = None,
+        data_type: Literal["array"] | None = None,
         tta_transforms: bool = False,
-        dataloader_params:
+        dataloader_params: dict | None = None,
     ) -> Union[list[NDArray], NDArray]: ...
 
     def predict(
@@ -578,13 +572,13 @@ class CAREamist:
         source: Union[PredictDataModule, Path, str, NDArray],
         *,
         batch_size: int = 1,
-        tile_size:
-        tile_overlap:
-        axes:
-        data_type:
+        tile_size: tuple[int, ...] | None = None,
+        tile_overlap: tuple[int, ...] | None = (48, 48),
+        axes: str | None = None,
+        data_type: Literal["array", "tiff", "custom"] | None = None,
         tta_transforms: bool = False,
-        dataloader_params:
-        read_source_func:
+        dataloader_params: dict | None = None,
+        read_source_func: Callable | None = None,
         extension_filter: str = "",
         **kwargs: Any,
     ) -> Union[list[NDArray], NDArray]:
@@ -704,18 +698,18 @@ class CAREamist:
         source: Union[PredictDataModule, Path, str],
         *,
         batch_size: int = 1,
-        tile_size:
-        tile_overlap:
-        axes:
-        data_type:
+        tile_size: tuple[int, ...] | None = None,
+        tile_overlap: tuple[int, ...] | None = (48, 48),
+        axes: str | None = None,
+        data_type: Literal["tiff", "custom"] | None = None,
         tta_transforms: bool = False,
-        dataloader_params:
-        read_source_func:
+        dataloader_params: dict | None = None,
+        read_source_func: Callable | None = None,
         extension_filter: str = "",
         write_type: Literal["tiff", "custom"] = "tiff",
-        write_extension:
-        write_func:
-        write_func_kwargs:
+        write_extension: str | None = None,
+        write_func: WriteFunc | None = None,
+        write_func_kwargs: dict[str, Any] | None = None,
         prediction_dir: Union[Path, str] = "predictions",
         **kwargs,
     ) -> None:
@@ -823,12 +817,12 @@ class CAREamist:
 
         # extract file names
         source_path: Union[Path, str, NDArray]
-        source_data_type: Literal["array", "tiff", "custom"]
+        source_data_type: Literal["array", "tiff", "czi", "custom"]
         if isinstance(source, PredictDataModule):
             source_path = source.pred_data
             source_data_type = source.data_type
             extension_filter = source.extension_filter
-        elif isinstance(source, str | Path):
+        elif isinstance(source, (str | Path)):
             source_path = source
             source_data_type = data_type or self.cfg.data_config.data_type
             extension_filter = SupportedData.get_extension_pattern(
@@ -841,7 +835,7 @@ class CAREamist:
             raise ValueError(
                 "Predicting to disk is not supported for input type 'array'."
             )
-        assert isinstance(source_path,
+        assert isinstance(source_path, (Path | str))  # because data_type != "array"
         source_path = Path(source_path)
 
         file_paths = list_files(source_path, source_data_type, extension_filter)
@@ -879,14 +873,14 @@ class CAREamist:
 
     def export_to_bmz(
         self,
-        path_to_archive: Union[Path
+        path_to_archive: Union[Path | str],
         friendly_model_name: str,
         input_array: NDArray,
         authors: list[dict],
         general_description: str,
         data_description: str,
-        covers:
-        channel_names:
+        covers: list[Union[Path, str]] | None = None,
+        channel_names: list[str] | None = None,
        model_version: str = "0.1.0",
     ) -> None:
         """Export the model to the BioImage Model Zoo format.
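The net effect of the Trainer change above: per-field training options (max_epochs, precision, gradient clipping, and so on) no longer live as individual attributes of the training config but are forwarded wholesale from a lightning_trainer_config mapping. A minimal sketch of how a user might drive this, assuming lightning_trainer_config is a plain dict field accepting any pytorch_lightning.Trainer keyword (the diff confirms the field name, not how it is populated):

from careamics import CAREamist
from careamics.config import create_n2v_configuration

config = create_n2v_configuration(
    experiment_name="n2v_demo",
    data_type="tiff",
    axes="YX",
    patch_size=(64, 64),
    batch_size=8,
    num_epochs=50,
)

# Hypothetical: forward arbitrary Trainer kwargs through the new field,
# replacing the removed per-field options shown in the diff above.
config.training_config.lightning_trainer_config = {
    "precision": "16-mixed",
    "gradient_clip_val": 1.0,
    "accumulate_grad_batches": 2,
}

careamist = CAREamist(source=config)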
careamics/cli/conf.py
CHANGED
@@ -3,7 +3,7 @@
 import sys
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Annotated
+from typing import Annotated
 
 import click
 import typer
@@ -116,7 +116,11 @@ def care(  # numpydoc ignore=PR01
         ),
     ],
     batch_size: Annotated[int, typer.Option(help="Batch size.")],
-    num_epochs: Annotated[int, typer.Option(help="Number of epochs.")],
+    num_epochs: Annotated[int, typer.Option(help="Number of epochs.")] = 100,
+    num_steps: Annotated[
+        int | None,
+        typer.Option(help="Number of batches per epoch (limit_train_batches)."),
+    ] = None,
     data_type: Annotated[
         click.Choice,
         typer.Option(click_type=click.Choice(["tiff"]), help="Type of the data."),
@@ -135,10 +139,10 @@ def care(  # numpydoc ignore=PR01
         ),
     ] = "mae",
     n_channels_in: Annotated[
-
+        int | None, typer.Option(help="Number of channels in")
     ] = None,
     n_channels_out: Annotated[
-
+        int | None, typer.Option(help="Number of channels out")
     ] = None,
     logger: Annotated[
         click.Choice,
@@ -175,6 +179,7 @@ def care(  # numpydoc ignore=PR01
         patch_size=patch_size,
         batch_size=batch_size,
         num_epochs=num_epochs,
+        num_steps=num_steps,
         # TODO: fix choosing augmentations
         augmentations=None if use_augmentations else [],
         independent_channels=independent_channels,
@@ -203,7 +208,11 @@ def n2n(  # numpydoc ignore=PR01
         ),
     ],
     batch_size: Annotated[int, typer.Option(help="Batch size.")],
-    num_epochs: Annotated[int, typer.Option(help="Number of epochs.")],
+    num_epochs: Annotated[int, typer.Option(help="Number of epochs.")] = 100,
+    num_steps: Annotated[
+        int | None,
+        typer.Option(help="Number of batches per epoch (limit_train_batches)."),
+    ] = None,
     data_type: Annotated[
         click.Choice,
         typer.Option(click_type=click.Choice(["tiff"]), help="Type of the data."),
@@ -222,10 +231,10 @@
         ),
     ] = "mae",
     n_channels_in: Annotated[
-
+        int | None, typer.Option(help="Number of channels in")
     ] = None,
     n_channels_out: Annotated[
-
+        int | None, typer.Option(help="Number of channels out")
     ] = None,
     logger: Annotated[
         click.Choice,
@@ -259,6 +268,7 @@
         patch_size=patch_size,
         batch_size=batch_size,
         num_epochs=num_epochs,
+        num_steps=num_steps,
         # TODO: fix choosing augmentations
         augmentations=None if use_augmentations else [],
         independent_channels=independent_channels,
@@ -287,7 +297,11 @@ def n2v(  # numpydoc ignore=PR01
         ),
     ],
     batch_size: Annotated[int, typer.Option(help="Batch size.")],
-    num_epochs: Annotated[int, typer.Option(help="Number of epochs.")],
+    num_epochs: Annotated[int, typer.Option(help="Number of epochs.")] = 100,
+    num_steps: Annotated[
+        int | None,
+        typer.Option(help="Number of batches per epoch (limit_train_batches)."),
+    ] = None,
     data_type: Annotated[
         click.Choice,
         typer.Option(click_type=click.Choice(["tiff"]), help="Type of the data."),
@@ -300,7 +314,7 @@
     ] = True,
     use_n2v2: Annotated[bool, typer.Option(help="Whether to use N2V2")] = False,
     n_channels: Annotated[
-
+        int | None, typer.Option(help="Number of channels (in and out)")
     ] = None,
     roi_size: Annotated[int, typer.Option(help="N2V pixel manipulation area.")] = 11,
     masked_pixel_percentage: Annotated[
@@ -364,6 +378,7 @@
         patch_size=patch_size,
         batch_size=batch_size,
         num_epochs=num_epochs,
+        num_steps=num_steps,
         # TODO: fix choosing augmentations
         augmentations=None if use_augmentations else [],
         independent_channels=independent_channels,
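All three builders (care, n2n, n2v) gain the same num_steps option and pass it through to the corresponding configuration factory. Assuming the factories expose the parameter under the same name (the call sites above suggest they do), the programmatic equivalent would look roughly like this sketch:

from careamics.config import create_n2v_configuration

config = create_n2v_configuration(
    experiment_name="n2v_demo",
    data_type="tiff",
    axes="YX",
    patch_size=(64, 64),
    batch_size=8,
    num_epochs=100,
    num_steps=200,  # batches per epoch, mapped to Lightning's limit_train_batches
)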
careamics/cli/main.py
CHANGED
@@ -7,7 +7,7 @@ its implementation is contained in the conf.py file.
 """
 
 from pathlib import Path
-from typing import Annotated
+from typing import Annotated
 
 import click
 import typer
@@ -47,7 +47,7 @@ def train(  # numpydoc ignore=PR01
         ),
     ],
     train_target: Annotated[
-
+        Path | None,
         typer.Option(
             "--train-target",
             "-tt",
@@ -58,7 +58,7 @@ def train(  # numpydoc ignore=PR01
         ),
     ] = None,
     val_source: Annotated[
-
+        Path | None,
         typer.Option(
             "--val-source",
             "-vs",
@@ -69,7 +69,7 @@ def train(  # numpydoc ignore=PR01
         ),
     ] = None,
     val_target: Annotated[
-
+        Path | None,
         typer.Option(
             "--val-target",
             "-vt",
@@ -96,7 +96,7 @@ def train(  # numpydoc ignore=PR01
         typer.Option(help="Minimum number of files to use for validation,"),
     ] = 1,
     work_dir: Annotated[
-
+        Path | None,
         typer.Option(
             "--work-dir",
             "-wd",
@@ -142,7 +142,7 @@ def predict(  # numpydoc ignore=PR01
     ],
     batch_size: Annotated[int, typer.Option(help="Batch size.")] = 1,
     tile_size: Annotated[
-
+        click.Tuple | None,
         typer.Option(
             help=(
                 "Size of the tiles to use for prediction, (if the data "
@@ -164,7 +164,7 @@ def predict(  # numpydoc ignore=PR01
         ),
     ] = (48, 48, -1),
     axes: Annotated[
-
+        str | None,
         typer.Option(
             help="Axes of the input data. If unused the data is assumed to have the "
             "same axes as the original training data."
@@ -190,7 +190,7 @@ def predict(  # numpydoc ignore=PR01
     ] = "tiff",
     # TODO: could make dataloader_params as json, necessary?
     work_dir: Annotated[
-
+        Path | None,
         typer.Option(
             "--work-dir",
             "-wd",
careamics/cli/utils.py
CHANGED
@@ -1,11 +1,9 @@
 """Utility functions for the CAREamics CLI."""
 
-from typing import Optional
-
 
 def handle_2D_3D_callback(
-    value:
-) ->
+    value: tuple[int, int, int] | None,
+) -> tuple[int, ...] | None:
     """
     Callback for options that require 2D or 3D inputs.
 
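Only the signature of handle_2D_3D_callback is visible in this diff. The body below is a guess at its behavior, based on the (48, 48, -1) default that predict passes for tile_size, where -1 in the last position appears to act as a "no third dimension" sentinel:

def handle_2D_3D_callback(
    value: tuple[int, int, int] | None,
) -> tuple[int, ...] | None:
    """Sketch: drop the trailing -1 sentinel for 2D inputs (assumed convention)."""
    if value is None:
        return None
    if value[-1] == -1:  # assumed: -1 marks "no Z dimension", i.e. 2D data
        return value[:2]
    return value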
careamics/config/__init__.py
CHANGED
@@ -12,8 +12,10 @@ __all__ = [
     "Configuration",
     "DataConfig",
     "GaussianMixtureNMConfig",
+    "HDNAlgorithm",
     "InferenceConfig",
     "LVAELossConfig",
+    "MicroSplitAlgorithm",
     "MultiChannelNMConfig",
     "N2NAlgorithm",
     "N2VAlgorithm",
@@ -22,6 +24,8 @@ __all__ = [
     "VAEBasedAlgorithm",
     "algorithm_factory",
     "create_care_configuration",
+    "create_hdn_configuration",
+    "create_microsplit_configuration",
     "create_n2n_configuration",
     "create_n2v_configuration",
     "load_configuration",
@@ -30,6 +34,8 @@ __all__ = [
 
 from .algorithms import (
     CAREAlgorithm,
+    HDNAlgorithm,
+    MicroSplitAlgorithm,
     N2NAlgorithm,
     N2VAlgorithm,
     UNetBasedAlgorithm,
@@ -40,6 +46,8 @@ from .configuration import Configuration
 from .configuration_factories import (
     algorithm_factory,
     create_care_configuration,
+    create_hdn_configuration,
+    create_microsplit_configuration,
     create_n2n_configuration,
     create_n2v_configuration,
 )
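The four new names are part of the public API of careamics.config, so after upgrading they can be imported directly (imports only; constructor and factory arguments are not shown in this diff):

from careamics.config import (
    HDNAlgorithm,
    MicroSplitAlgorithm,
    create_hdn_configuration,
    create_microsplit_configuration,
)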
careamics/config/algorithms/__init__.py
CHANGED

@@ -2,6 +2,8 @@
 
 __all__ = [
     "CAREAlgorithm",
+    "HDNAlgorithm",
+    "MicroSplitAlgorithm",
     "N2NAlgorithm",
     "N2VAlgorithm",
     "UNetBasedAlgorithm",
@@ -9,6 +11,8 @@ __all__ = [
 ]
 
 from .care_algorithm_model import CAREAlgorithm
+from .hdn_algorithm_model import HDNAlgorithm
+from .microsplit_algorithm_model import MicroSplitAlgorithm
 from .n2n_algorithm_model import N2NAlgorithm
 from .n2v_algorithm_model import N2VAlgorithm
 from .unet_algorithm_model import UNetBasedAlgorithm
careamics/config/algorithms/hdn_algorithm_model.py
ADDED

@@ -0,0 +1,103 @@
+"""HDN algorithm configuration."""
+
+from typing import Literal
+
+from bioimageio.spec.generic.v0_3 import CiteEntry
+from pydantic import ConfigDict
+
+from careamics.config.algorithms.vae_algorithm_model import VAEBasedAlgorithm
+from careamics.config.architectures import LVAEModel
+from careamics.config.loss_model import LVAELossConfig
+
+HDN = "Hierarchical DivNoising"
+
+HDN_DESCRIPTION = (
+    "HDN leverages a hierarchical VAE to perform image "
+    "restoration. It is designed to be interpretable and unsupervised, "
+    "making it suitable for a wide range of microscopy images."
+)
+HDN_REF = CiteEntry(
+    text='Prakash, M., Delbracio, M., Milanfar, P., Jug, F. 2022. "Interpretable '
+    'Unsupervised Diversity Denoising and Artefact Removal." The International '
+    "Conference on Learning Representations (ICLR).",
+    doi="10.1561/2200000056",
+)
+
+
+class HDNAlgorithm(VAEBasedAlgorithm):
+    """HDN algorithm configuration."""
+
+    model_config = ConfigDict(validate_assignment=True)
+
+    algorithm: Literal["hdn"] = "hdn"
+
+    loss: LVAELossConfig
+
+    model: LVAEModel  # TODO add validators
+
+    is_supervised: bool = False
+
+    def get_algorithm_friendly_name(self) -> str:
+        """
+        Get the algorithm friendly name.
+
+        Returns
+        -------
+        str
+            Friendly name of the algorithm.
+        """
+        return HDN
+
+    def get_algorithm_keywords(self) -> list[str]:
+        """
+        Get algorithm keywords.
+
+        Returns
+        -------
+        list[str]
+            List of keywords.
+        """
+        return [
+            "restoration",
+            "VAE",
+            "3D" if self.model.is_3D() else "2D",
+            "CAREamics",
+            "pytorch",
+        ]
+
+    def get_algorithm_references(self) -> str:
+        """
+        Get the algorithm references.
+
+        This is used to generate the README of the BioImage Model Zoo export.
+
+        Returns
+        -------
+        str
+            Algorithm references.
+        """
+        return HDN_REF.text + " doi: " + HDN_REF.doi
+
+    def get_algorithm_citations(self) -> list[CiteEntry]:
+        """
+        Return a list of citation entries of the current algorithm.
+
+        This is used to generate the model description for the BioImage Model Zoo.
+
+        Returns
+        -------
+        List[CiteEntry]
+            List of citation entries.
+        """
+        return [HDN_REF]
+
+    def get_algorithm_description(self) -> str:
+        """
+        Get the algorithm description.
+
+        Returns
+        -------
+        str
+            Algorithm description.
+        """
+        return HDN_DESCRIPTION