careamics 0.0.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- careamics/__init__.py +24 -0
- careamics/careamist.py +961 -0
- careamics/cli/__init__.py +5 -0
- careamics/cli/conf.py +394 -0
- careamics/cli/main.py +234 -0
- careamics/cli/utils.py +27 -0
- careamics/config/__init__.py +66 -0
- careamics/config/algorithms/__init__.py +21 -0
- careamics/config/algorithms/care_algorithm_config.py +122 -0
- careamics/config/algorithms/hdn_algorithm_config.py +103 -0
- careamics/config/algorithms/microsplit_algorithm_config.py +103 -0
- careamics/config/algorithms/n2n_algorithm_config.py +115 -0
- careamics/config/algorithms/n2v_algorithm_config.py +296 -0
- careamics/config/algorithms/pn2v_algorithm_config.py +301 -0
- careamics/config/algorithms/unet_algorithm_config.py +91 -0
- careamics/config/algorithms/vae_algorithm_config.py +178 -0
- careamics/config/architectures/__init__.py +7 -0
- careamics/config/architectures/architecture_config.py +37 -0
- careamics/config/architectures/lvae_config.py +262 -0
- careamics/config/architectures/unet_config.py +125 -0
- careamics/config/configuration.py +367 -0
- careamics/config/configuration_factories.py +2400 -0
- careamics/config/data/__init__.py +27 -0
- careamics/config/data/data_config.py +472 -0
- careamics/config/data/inference_config.py +237 -0
- careamics/config/data/ng_data_config.py +1038 -0
- careamics/config/data/patch_filter/__init__.py +15 -0
- careamics/config/data/patch_filter/filter_config.py +16 -0
- careamics/config/data/patch_filter/mask_filter_config.py +17 -0
- careamics/config/data/patch_filter/max_filter_config.py +15 -0
- careamics/config/data/patch_filter/meanstd_filter_config.py +18 -0
- careamics/config/data/patch_filter/shannon_filter_config.py +15 -0
- careamics/config/data/patching_strategies/__init__.py +15 -0
- careamics/config/data/patching_strategies/_overlapping_patched_config.py +102 -0
- careamics/config/data/patching_strategies/_patched_config.py +56 -0
- careamics/config/data/patching_strategies/random_patching_config.py +45 -0
- careamics/config/data/patching_strategies/sequential_patching_config.py +25 -0
- careamics/config/data/patching_strategies/tiled_patching_config.py +40 -0
- careamics/config/data/patching_strategies/whole_patching_config.py +12 -0
- careamics/config/data/tile_information.py +65 -0
- careamics/config/lightning/__init__.py +15 -0
- careamics/config/lightning/callbacks/__init__.py +8 -0
- careamics/config/lightning/callbacks/callback_config.py +116 -0
- careamics/config/lightning/optimizer_configs.py +186 -0
- careamics/config/lightning/training_config.py +70 -0
- careamics/config/losses/__init__.py +8 -0
- careamics/config/losses/loss_config.py +60 -0
- careamics/config/ng_configs/__init__.py +5 -0
- careamics/config/ng_configs/n2v_configuration.py +64 -0
- careamics/config/ng_configs/ng_configuration.py +256 -0
- careamics/config/ng_factories/__init__.py +9 -0
- careamics/config/ng_factories/algorithm_factory.py +120 -0
- careamics/config/ng_factories/data_factory.py +154 -0
- careamics/config/ng_factories/n2v_factory.py +256 -0
- careamics/config/ng_factories/training_factory.py +69 -0
- careamics/config/noise_model/__init__.py +12 -0
- careamics/config/noise_model/likelihood_config.py +60 -0
- careamics/config/noise_model/noise_model_config.py +149 -0
- careamics/config/support/__init__.py +31 -0
- careamics/config/support/supported_activations.py +27 -0
- careamics/config/support/supported_algorithms.py +40 -0
- careamics/config/support/supported_architectures.py +13 -0
- careamics/config/support/supported_data.py +122 -0
- careamics/config/support/supported_filters.py +17 -0
- careamics/config/support/supported_loggers.py +10 -0
- careamics/config/support/supported_losses.py +32 -0
- careamics/config/support/supported_optimizers.py +57 -0
- careamics/config/support/supported_patching_strategies.py +22 -0
- careamics/config/support/supported_pixel_manipulations.py +15 -0
- careamics/config/support/supported_struct_axis.py +21 -0
- careamics/config/support/supported_transforms.py +12 -0
- careamics/config/transformations/__init__.py +22 -0
- careamics/config/transformations/n2v_manipulate_config.py +79 -0
- careamics/config/transformations/normalize_config.py +59 -0
- careamics/config/transformations/transform_config.py +45 -0
- careamics/config/transformations/transform_unions.py +29 -0
- careamics/config/transformations/xy_flip_config.py +43 -0
- careamics/config/transformations/xy_random_rotate90_config.py +35 -0
- careamics/config/utils/__init__.py +8 -0
- careamics/config/utils/configuration_io.py +85 -0
- careamics/config/validators/__init__.py +18 -0
- careamics/config/validators/axes_validators.py +90 -0
- careamics/config/validators/model_validators.py +84 -0
- careamics/config/validators/patch_validators.py +55 -0
- careamics/conftest.py +39 -0
- careamics/dataset/__init__.py +17 -0
- careamics/dataset/dataset_utils/__init__.py +19 -0
- careamics/dataset/dataset_utils/dataset_utils.py +118 -0
- careamics/dataset/dataset_utils/file_utils.py +141 -0
- careamics/dataset/dataset_utils/iterate_over_files.py +84 -0
- careamics/dataset/dataset_utils/running_stats.py +189 -0
- careamics/dataset/in_memory_dataset.py +303 -0
- careamics/dataset/in_memory_pred_dataset.py +88 -0
- careamics/dataset/in_memory_tiled_pred_dataset.py +131 -0
- careamics/dataset/iterable_dataset.py +294 -0
- careamics/dataset/iterable_pred_dataset.py +121 -0
- careamics/dataset/iterable_tiled_pred_dataset.py +141 -0
- careamics/dataset/patching/__init__.py +1 -0
- careamics/dataset/patching/patching.py +300 -0
- careamics/dataset/patching/random_patching.py +110 -0
- careamics/dataset/patching/sequential_patching.py +212 -0
- careamics/dataset/patching/validate_patch_dimension.py +64 -0
- careamics/dataset/tiling/__init__.py +10 -0
- careamics/dataset/tiling/collate_tiles.py +33 -0
- careamics/dataset/tiling/lvae_tiled_patching.py +375 -0
- careamics/dataset/tiling/tiled_patching.py +166 -0
- careamics/dataset_ng/README.md +212 -0
- careamics/dataset_ng/__init__.py +0 -0
- careamics/dataset_ng/dataset.py +365 -0
- careamics/dataset_ng/demos/bsd68_demo.ipynb +361 -0
- careamics/dataset_ng/demos/bsd68_zarr_demo.ipynb +453 -0
- careamics/dataset_ng/demos/care_U2OS_demo.ipynb +330 -0
- careamics/dataset_ng/demos/demo_custom_image_stack.ipynb +736 -0
- careamics/dataset_ng/demos/demo_datamodule.ipynb +447 -0
- careamics/dataset_ng/demos/demo_dataset.ipynb +278 -0
- careamics/dataset_ng/demos/demo_patch_extractor.py +51 -0
- careamics/dataset_ng/demos/mouse_nuclei_demo.ipynb +293 -0
- careamics/dataset_ng/factory.py +180 -0
- careamics/dataset_ng/grouped_index_sampler.py +73 -0
- careamics/dataset_ng/image_stack/__init__.py +14 -0
- careamics/dataset_ng/image_stack/czi_image_stack.py +396 -0
- careamics/dataset_ng/image_stack/file_image_stack.py +140 -0
- careamics/dataset_ng/image_stack/image_stack_protocol.py +93 -0
- careamics/dataset_ng/image_stack/image_utils/__init__.py +6 -0
- careamics/dataset_ng/image_stack/image_utils/image_stack_utils.py +125 -0
- careamics/dataset_ng/image_stack/in_memory_image_stack.py +93 -0
- careamics/dataset_ng/image_stack/zarr_image_stack.py +170 -0
- careamics/dataset_ng/image_stack_loader/__init__.py +19 -0
- careamics/dataset_ng/image_stack_loader/image_stack_loader_protocol.py +70 -0
- careamics/dataset_ng/image_stack_loader/image_stack_loaders.py +273 -0
- careamics/dataset_ng/image_stack_loader/zarr_utils.py +130 -0
- careamics/dataset_ng/legacy_interoperability.py +175 -0
- careamics/dataset_ng/microsplit_input_synth.py +377 -0
- careamics/dataset_ng/patch_extractor/__init__.py +7 -0
- careamics/dataset_ng/patch_extractor/limit_file_extractor.py +50 -0
- careamics/dataset_ng/patch_extractor/patch_construction.py +151 -0
- careamics/dataset_ng/patch_extractor/patch_extractor.py +117 -0
- careamics/dataset_ng/patch_filter/__init__.py +20 -0
- careamics/dataset_ng/patch_filter/coordinate_filter_protocol.py +27 -0
- careamics/dataset_ng/patch_filter/filter_factory.py +95 -0
- careamics/dataset_ng/patch_filter/mask_filter.py +96 -0
- careamics/dataset_ng/patch_filter/max_filter.py +188 -0
- careamics/dataset_ng/patch_filter/mean_std_filter.py +218 -0
- careamics/dataset_ng/patch_filter/patch_filter_protocol.py +50 -0
- careamics/dataset_ng/patch_filter/shannon_filter.py +188 -0
- careamics/dataset_ng/patching_strategies/__init__.py +26 -0
- careamics/dataset_ng/patching_strategies/patching_strategy_factory.py +50 -0
- careamics/dataset_ng/patching_strategies/patching_strategy_protocol.py +161 -0
- careamics/dataset_ng/patching_strategies/random_patching.py +393 -0
- careamics/dataset_ng/patching_strategies/sequential_patching.py +99 -0
- careamics/dataset_ng/patching_strategies/tiling_strategy.py +207 -0
- careamics/dataset_ng/patching_strategies/whole_sample.py +61 -0
- careamics/file_io/__init__.py +15 -0
- careamics/file_io/read/__init__.py +11 -0
- careamics/file_io/read/get_func.py +57 -0
- careamics/file_io/read/tiff.py +58 -0
- careamics/file_io/write/__init__.py +15 -0
- careamics/file_io/write/get_func.py +63 -0
- careamics/file_io/write/tiff.py +40 -0
- careamics/lightning/__init__.py +32 -0
- careamics/lightning/callbacks/__init__.py +13 -0
- careamics/lightning/callbacks/data_stats_callback.py +33 -0
- careamics/lightning/callbacks/hyperparameters_callback.py +49 -0
- careamics/lightning/callbacks/prediction_writer_callback/__init__.py +20 -0
- careamics/lightning/callbacks/prediction_writer_callback/file_path_utils.py +56 -0
- careamics/lightning/callbacks/prediction_writer_callback/prediction_writer_callback.py +234 -0
- careamics/lightning/callbacks/prediction_writer_callback/write_strategy.py +399 -0
- careamics/lightning/callbacks/prediction_writer_callback/write_strategy_factory.py +215 -0
- careamics/lightning/callbacks/progress_bar_callback.py +90 -0
- careamics/lightning/dataset_ng/__init__.py +1 -0
- careamics/lightning/dataset_ng/callbacks/__init__.py +1 -0
- careamics/lightning/dataset_ng/callbacks/prediction_writer/__init__.py +29 -0
- careamics/lightning/dataset_ng/callbacks/prediction_writer/cached_tiles_strategy.py +164 -0
- careamics/lightning/dataset_ng/callbacks/prediction_writer/file_path_utils.py +33 -0
- careamics/lightning/dataset_ng/callbacks/prediction_writer/prediction_writer_callback.py +219 -0
- careamics/lightning/dataset_ng/callbacks/prediction_writer/write_image_strategy.py +91 -0
- careamics/lightning/dataset_ng/callbacks/prediction_writer/write_strategy.py +27 -0
- careamics/lightning/dataset_ng/callbacks/prediction_writer/write_strategy_factory.py +214 -0
- careamics/lightning/dataset_ng/callbacks/prediction_writer/write_tiles_zarr_strategy.py +375 -0
- careamics/lightning/dataset_ng/data_module.py +529 -0
- careamics/lightning/dataset_ng/data_module_utils.py +395 -0
- careamics/lightning/dataset_ng/lightning_modules/__init__.py +9 -0
- careamics/lightning/dataset_ng/lightning_modules/care_module.py +97 -0
- careamics/lightning/dataset_ng/lightning_modules/n2v_module.py +106 -0
- careamics/lightning/dataset_ng/lightning_modules/unet_module.py +221 -0
- careamics/lightning/dataset_ng/prediction/__init__.py +16 -0
- careamics/lightning/dataset_ng/prediction/convert_prediction.py +198 -0
- careamics/lightning/dataset_ng/prediction/stitch_prediction.py +171 -0
- careamics/lightning/lightning_module.py +914 -0
- careamics/lightning/microsplit_data_module.py +632 -0
- careamics/lightning/predict_data_module.py +341 -0
- careamics/lightning/train_data_module.py +666 -0
- careamics/losses/__init__.py +21 -0
- careamics/losses/fcn/__init__.py +1 -0
- careamics/losses/fcn/losses.py +125 -0
- careamics/losses/loss_factory.py +80 -0
- careamics/losses/lvae/__init__.py +1 -0
- careamics/losses/lvae/loss_utils.py +83 -0
- careamics/losses/lvae/losses.py +589 -0
- careamics/lvae_training/__init__.py +0 -0
- careamics/lvae_training/calibration.py +191 -0
- careamics/lvae_training/dataset/__init__.py +20 -0
- careamics/lvae_training/dataset/config.py +135 -0
- careamics/lvae_training/dataset/lc_dataset.py +274 -0
- careamics/lvae_training/dataset/ms_dataset_ref.py +1067 -0
- careamics/lvae_training/dataset/multich_dataset.py +1121 -0
- careamics/lvae_training/dataset/multicrop_dset.py +196 -0
- careamics/lvae_training/dataset/multifile_dataset.py +335 -0
- careamics/lvae_training/dataset/types.py +32 -0
- careamics/lvae_training/dataset/utils/__init__.py +0 -0
- careamics/lvae_training/dataset/utils/data_utils.py +114 -0
- careamics/lvae_training/dataset/utils/empty_patch_fetcher.py +65 -0
- careamics/lvae_training/dataset/utils/index_manager.py +491 -0
- careamics/lvae_training/dataset/utils/index_switcher.py +165 -0
- careamics/lvae_training/eval_utils.py +987 -0
- careamics/lvae_training/get_config.py +84 -0
- careamics/lvae_training/lightning_module.py +701 -0
- careamics/lvae_training/metrics.py +214 -0
- careamics/lvae_training/train_lvae.py +342 -0
- careamics/lvae_training/train_utils.py +121 -0
- careamics/model_io/__init__.py +7 -0
- careamics/model_io/bioimage/__init__.py +11 -0
- careamics/model_io/bioimage/_readme_factory.py +113 -0
- careamics/model_io/bioimage/bioimage_utils.py +56 -0
- careamics/model_io/bioimage/cover_factory.py +171 -0
- careamics/model_io/bioimage/model_description.py +341 -0
- careamics/model_io/bmz_io.py +251 -0
- careamics/model_io/model_io_utils.py +95 -0
- careamics/models/__init__.py +5 -0
- careamics/models/activation.py +40 -0
- careamics/models/layers.py +495 -0
- careamics/models/lvae/__init__.py +3 -0
- careamics/models/lvae/layers.py +1371 -0
- careamics/models/lvae/likelihoods.py +394 -0
- careamics/models/lvae/lvae.py +848 -0
- careamics/models/lvae/noise_models.py +738 -0
- careamics/models/lvae/stochastic.py +394 -0
- careamics/models/lvae/utils.py +404 -0
- careamics/models/model_factory.py +54 -0
- careamics/models/unet.py +449 -0
- careamics/nm_training_placeholder.py +203 -0
- careamics/prediction_utils/__init__.py +21 -0
- careamics/prediction_utils/lvae_prediction.py +158 -0
- careamics/prediction_utils/lvae_tiling_manager.py +362 -0
- careamics/prediction_utils/prediction_outputs.py +238 -0
- careamics/prediction_utils/stitch_prediction.py +193 -0
- careamics/py.typed +5 -0
- careamics/transforms/__init__.py +22 -0
- careamics/transforms/compose.py +173 -0
- careamics/transforms/n2v_manipulate.py +150 -0
- careamics/transforms/n2v_manipulate_torch.py +149 -0
- careamics/transforms/normalize.py +374 -0
- careamics/transforms/pixel_manipulation.py +406 -0
- careamics/transforms/pixel_manipulation_torch.py +388 -0
- careamics/transforms/struct_mask_parameters.py +20 -0
- careamics/transforms/transform.py +24 -0
- careamics/transforms/tta.py +88 -0
- careamics/transforms/xy_flip.py +131 -0
- careamics/transforms/xy_random_rotate90.py +108 -0
- careamics/utils/__init__.py +19 -0
- careamics/utils/autocorrelation.py +40 -0
- careamics/utils/base_enum.py +60 -0
- careamics/utils/context.py +67 -0
- careamics/utils/deprecation.py +63 -0
- careamics/utils/lightning_utils.py +71 -0
- careamics/utils/logging.py +323 -0
- careamics/utils/metrics.py +394 -0
- careamics/utils/path_utils.py +26 -0
- careamics/utils/plotting.py +76 -0
- careamics/utils/ram.py +15 -0
- careamics/utils/receptive_field.py +108 -0
- careamics/utils/serializers.py +62 -0
- careamics/utils/torch_utils.py +150 -0
- careamics/utils/version.py +38 -0
- careamics-0.0.19.dist-info/METADATA +80 -0
- careamics-0.0.19.dist-info/RECORD +279 -0
- careamics-0.0.19.dist-info/WHEEL +4 -0
- careamics-0.0.19.dist-info/entry_points.txt +2 -0
- careamics-0.0.19.dist-info/licenses/LICENSE +28 -0
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"""Utility functions for paths."""
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Union
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def check_path_exists(path: Union[str, Path]) -> Path:
    """Validate that a path exists on disk and return it as a `Path`.

    Parameters
    ----------
    path : Union[str, Path]
        Location to verify.

    Returns
    -------
    Path
        The same location, converted to a `Path` object.

    Raises
    ------
    FileNotFoundError
        If the location does not exist.
    """
    resolved = Path(path)

    if not resolved.exists():
        raise FileNotFoundError(f"Data path {resolved} is incorrect or does not exist.")

    return resolved
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
"""Plotting utilities."""
|
|
2
|
+
|
|
3
|
+
import matplotlib.pyplot as plt
|
|
4
|
+
import numpy as np
|
|
5
|
+
import torch
|
|
6
|
+
from numpy.typing import NDArray
|
|
7
|
+
|
|
8
|
+
from careamics.models.lvae.noise_models import GaussianMixtureNoiseModel
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def plot_noise_model_probability_distribution(
    noise_model: GaussianMixtureNoiseModel,
    signalBinIndex: int,
    histogram: NDArray,
    channel: str | None = None,
    number_of_bins: int = 100,
) -> None:
    """Plot the probability distribution P(x|s) for one ground-truth signal bin.

    The GMM-based noise model prediction is displayed side by side with the
    histogram-based noise model so the two can be compared.

    Parameters
    ----------
    noise_model : GaussianMixtureNoiseModel
        Trained GaussianMixtureNoiseModel.
    signalBinIndex : int
        Index of signal bin. Values go from 0 to number of bins (`n_bin`).
    histogram : NDArray
        Histogram based noise model.
    channel : Optional[str], optional
        Channel name used for plotting. Default is None.
    number_of_bins : int, optional
        Number of bins in the resulting histogram. Default is 100.
    """
    lo = noise_model.min_signal.item()
    hi = noise_model.max_signal.item()
    width = (hi - lo) / number_of_bins

    # Centre of the selected signal bin, mapped back into signal space.
    signal_value = (signalBinIndex / number_of_bins) * (hi - lo) + lo + width / 2
    signal_value = torch.tensor(signal_value)

    # Bin centres spanning the full observation range.
    observations = torch.arange(lo, hi, width) + width / 2

    densities = noise_model.likelihood(
        observations=observations, signals=signal_value
    ).numpy()

    plt.figure(figsize=(12, 5))
    plt.suptitle(f"Noise model for channel {channel}" if channel else "Noise model")

    # Left panel: the histogram-based noise model with the queried row marked.
    plt.subplot(1, 2, 1)
    plt.xlabel("Observation Bin")
    plt.ylabel("Signal Bin")
    plt.imshow(histogram**0.25, cmap="gray")
    plt.axhline(y=signalBinIndex + 0.5, linewidth=5, color="blue", alpha=0.5)

    # Right panel: the GMM likelihood as a function of the observation.
    plt.subplot(1, 2, 2)
    plt.plot(
        observations,
        densities,
        label="GMM : " + " signal = " + str(np.round(signal_value, 2)),
        marker=".",
        color="red",
        linewidth=2,
    )
    plt.xlabel("Observations (x) for signal s = " + str(signal_value))
    plt.ylabel("Probability Density")
    plt.title("Probability Distribution P(x|s) at signal =" + str(signal_value))
    plt.legend()
|
careamics/utils/ram.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
"""Receptive field calculation for computing the tile overlap."""
|
|
2
|
+
|
|
3
|
+
# TODO better docstring and function names
|
|
4
|
+
# Adapted from: https://github.com/frgfm/torch-scan
|
|
5
|
+
|
|
6
|
+
# import math
|
|
7
|
+
# import warnings
|
|
8
|
+
# from typing import Tuple, Union
|
|
9
|
+
|
|
10
|
+
# from torch import Tensor, nn
|
|
11
|
+
# from torch.nn import Module
|
|
12
|
+
# from torch.nn.modules.batchnorm import _BatchNorm
|
|
13
|
+
# from torch.nn.modules.conv import _ConvNd, _ConvTransposeNd
|
|
14
|
+
# from torch.nn.modules.pooling import (
|
|
15
|
+
# _AdaptiveAvgPoolNd,
|
|
16
|
+
# _AdaptiveMaxPoolNd,
|
|
17
|
+
# _AvgPoolNd,
|
|
18
|
+
# _MaxPoolNd,
|
|
19
|
+
# )
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
# def module_rf(module: Module, inp: Tensor, out: Tensor) -> Tuple[float, float, float]:
|
|
23
|
+
# """Estimate the spatial receptive field of the module.
|
|
24
|
+
|
|
25
|
+
# Parameters
|
|
26
|
+
# ----------
|
|
27
|
+
# module : Module
|
|
28
|
+
# Module to estimate the receptive field.
|
|
29
|
+
# inp : Tensor
|
|
30
|
+
# Input tensor.
|
|
31
|
+
# out : Tensor
|
|
32
|
+
# Output tensor.
|
|
33
|
+
|
|
34
|
+
# Returns
|
|
35
|
+
# -------
|
|
36
|
+
# Tuple[float, float, float]
|
|
37
|
+
# Receptive field, effective stride and padding.
|
|
38
|
+
# """
|
|
39
|
+
# if isinstance(
|
|
40
|
+
# module,
|
|
41
|
+
# (
|
|
42
|
+
# nn.Identity,
|
|
43
|
+
# nn.Flatten,
|
|
44
|
+
# nn.ReLU,
|
|
45
|
+
# nn.ELU,
|
|
46
|
+
# nn.LeakyReLU,
|
|
47
|
+
# nn.ReLU6,
|
|
48
|
+
# nn.Tanh,
|
|
49
|
+
# nn.Sigmoid,
|
|
50
|
+
# _BatchNorm,
|
|
51
|
+
# nn.Dropout,
|
|
52
|
+
# nn.Linear,
|
|
53
|
+
# ),
|
|
54
|
+
# ):
|
|
55
|
+
# return 1.0, 1.0, 0.0
|
|
56
|
+
# elif isinstance(module, _ConvTransposeNd):
|
|
57
|
+
# return rf_convtransposend(module, inp, out)
|
|
58
|
+
# elif isinstance(module, (_ConvNd, _MaxPoolNd, _AvgPoolNd)):
|
|
59
|
+
# return rf_aggregnd(module, inp, out)
|
|
60
|
+
# elif isinstance(module, (_AdaptiveMaxPoolNd, _AdaptiveAvgPoolNd)):
|
|
61
|
+
# return rf_adaptive_poolnd(module, inp, out)
|
|
62
|
+
# else:
|
|
63
|
+
# warnings.warn(
|
|
64
|
+
# f"Module type not supported: {module.__class__.__name__}", stacklevel=1
|
|
65
|
+
# )
|
|
66
|
+
# return 1.0, 1.0, 0.0
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
# def rf_convtransposend(
|
|
70
|
+
# module: _ConvTransposeNd, _: Tensor, __: Tensor
|
|
71
|
+
# ) -> Tuple[float, float, float]:
|
|
72
|
+
# k = (
|
|
73
|
+
# module.kernel_size[0]
|
|
74
|
+
# if isinstance(module.kernel_size, tuple)
|
|
75
|
+
# else module.kernel_size
|
|
76
|
+
# )
|
|
77
|
+
# s = module.stride[0] if isinstance(module.stride, tuple) else module.stride
|
|
78
|
+
# return -k, 1.0 / s, 0.0
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
# def rf_aggregnd(
|
|
82
|
+
# module: Union[_ConvNd, _MaxPoolNd, _AvgPoolNd], _: Tensor, __: Tensor
|
|
83
|
+
# ) -> Tuple[float, float, float]:
|
|
84
|
+
# k = (
|
|
85
|
+
# module.kernel_size[0]
|
|
86
|
+
# if isinstance(module.kernel_size, tuple)
|
|
87
|
+
# else module.kernel_size
|
|
88
|
+
# )
|
|
89
|
+
# if hasattr(module, "dilation"):
|
|
90
|
+
# d = (
|
|
91
|
+
# module.dilation[0]
|
|
92
|
+
# if isinstance(module.dilation, tuple)
|
|
93
|
+
# else module.dilation
|
|
94
|
+
# )
|
|
95
|
+
# k = d * (k - 1) + 1
|
|
96
|
+
# s = module.stride[0] if isinstance(module.stride, tuple) else module.stride
|
|
97
|
+
# p = module.padding[0] if isinstance(module.padding, tuple) else module.padding
|
|
98
|
+
# return k, s, p # type: ignore[return-value]
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
# def rf_adaptive_poolnd(
|
|
102
|
+
# _: Union[_AdaptiveMaxPoolNd, _AdaptiveAvgPoolNd], inp: Tensor, out: Tensor
|
|
103
|
+
# ) -> Tuple[int, int, float]:
|
|
104
|
+
# stride = math.ceil(inp.shape[-1] / out.shape[-1])
|
|
105
|
+
# kernel_size = stride
|
|
106
|
+
# padding = (inp.shape[-1] - kernel_size * stride) / 2
|
|
107
|
+
|
|
108
|
+
# return kernel_size, stride, padding
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
"""A script for serializers in the careamics package."""
|
|
2
|
+
|
|
3
|
+
import ast
|
|
4
|
+
import json
|
|
5
|
+
from typing import Union
|
|
6
|
+
|
|
7
|
+
import numpy as np
|
|
8
|
+
import torch
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def _array_to_json(arr: Union[np.ndarray, torch.Tensor]) -> str:
|
|
12
|
+
"""Convert an array to a list and then to a JSON string.
|
|
13
|
+
|
|
14
|
+
Parameters
|
|
15
|
+
----------
|
|
16
|
+
arr : Union[np.ndarray, torch.Tensor]
|
|
17
|
+
Array to be serialized.
|
|
18
|
+
|
|
19
|
+
Returns
|
|
20
|
+
-------
|
|
21
|
+
str
|
|
22
|
+
JSON string representing the array.
|
|
23
|
+
"""
|
|
24
|
+
if isinstance(arr, str):
|
|
25
|
+
return arr
|
|
26
|
+
return json.dumps(arr.tolist())
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _to_numpy(lst: Union[str, list]) -> np.ndarray:
|
|
30
|
+
"""Deserialize a list or string representing a list into `np.ndarray`.
|
|
31
|
+
|
|
32
|
+
Parameters
|
|
33
|
+
----------
|
|
34
|
+
lst : list
|
|
35
|
+
List or string representing a list with the array content to be deserialized.
|
|
36
|
+
|
|
37
|
+
Returns
|
|
38
|
+
-------
|
|
39
|
+
np.ndarray
|
|
40
|
+
The deserialized array.
|
|
41
|
+
"""
|
|
42
|
+
if isinstance(lst, str):
|
|
43
|
+
lst = ast.literal_eval(lst)
|
|
44
|
+
return np.asarray(lst)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _to_torch(lst: Union[str, list]) -> torch.Tensor:
|
|
48
|
+
"""Deserialize list or string representing a list into `torch.Tensor`.
|
|
49
|
+
|
|
50
|
+
Parameters
|
|
51
|
+
----------
|
|
52
|
+
lst : Union[str, list]
|
|
53
|
+
List or string representing a list swith the array content to be deserialized.
|
|
54
|
+
|
|
55
|
+
Returns
|
|
56
|
+
-------
|
|
57
|
+
torch.Tensor
|
|
58
|
+
The deserialized tensor.
|
|
59
|
+
"""
|
|
60
|
+
if isinstance(lst, str):
|
|
61
|
+
lst = ast.literal_eval(lst)
|
|
62
|
+
return torch.tensor(lst)
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Convenience functions using torch.
|
|
3
|
+
|
|
4
|
+
These functions are used to control certain aspects and behaviours of PyTorch.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import inspect
|
|
8
|
+
import platform
|
|
9
|
+
from typing import Union
|
|
10
|
+
|
|
11
|
+
import torch
|
|
12
|
+
|
|
13
|
+
from careamics.config.support import SupportedOptimizer, SupportedScheduler
|
|
14
|
+
|
|
15
|
+
from ..utils.logging import get_logger
|
|
16
|
+
|
|
17
|
+
logger = get_logger(__name__) # TODO are logger still needed?
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def get_device() -> str:
    """
    Identify the best available compute device.

    Returns
    -------
    str
        The device on which operations take place, e.g. "cuda", "cpu" or "mps".
    """
    if torch.cuda.is_available():
        return "cuda"

    # MPS is only usable on Apple-silicon hosts, hence the processor guard.
    on_apple_silicon = platform.processor() in ("arm", "arm64")
    if torch.backends.mps.is_available() and on_apple_silicon:
        return "mps"

    return "cpu"
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def filter_parameters(
    func: type,
    user_params: dict,
) -> dict:
    """
    Keep only the user-provided parameters that appear in `func`'s signature.

    Parameters
    ----------
    func : type
        Class object.
    user_params : dict
        User provided parameters.

    Returns
    -------
    dict
        Parameters matching `func`'s signature.
    """
    # Names accepted by the callable's signature.
    accepted = inspect.signature(func).parameters

    return {name: value for name, value in user_params.items() if name in accepted}
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def get_optimizer(name: str) -> torch.optim.Optimizer:
    """
    Return the optimizer class given its name.

    Parameters
    ----------
    name : str
        Optimizer name.

    Returns
    -------
    torch.nn.Optimizer
        Optimizer class.

    Raises
    ------
    NotImplementedError
        If the optimizer name is not among the supported ones.
    """
    # Only optimizers whitelisted in SupportedOptimizer can be resolved.
    if name in SupportedOptimizer:
        return getattr(torch.optim, name)

    raise NotImplementedError(f"Optimizer {name} is not yet supported.")
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def get_optimizers() -> dict[str, str]:
    """
    Return the list of all optimizers available in torch.optim.

    Returns
    -------
    dict
        Optimizers available in torch.optim.
    """
    def _is_optimizer_class(candidate: object) -> bool:
        # Keep subclasses of Optimizer, excluding the abstract base itself.
        return inspect.isclass(candidate) and issubclass(
            candidate, torch.optim.Optimizer
        )

    return {
        name: name
        for name, candidate in inspect.getmembers(torch.optim)
        if _is_optimizer_class(candidate) and name != "Optimizer"
    }
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def get_scheduler(
    name: str,
) -> Union[
    # torch.optim.lr_scheduler.LRScheduler,
    torch.optim.lr_scheduler.ReduceLROnPlateau,
]:
    """
    Return the scheduler class given its name.

    Parameters
    ----------
    name : str
        Scheduler name.

    Returns
    -------
    Union
        Scheduler class.

    Raises
    ------
    NotImplementedError
        If the scheduler name is not among the supported ones.
    """
    # Only schedulers whitelisted in SupportedScheduler can be resolved.
    if name in SupportedScheduler:
        return getattr(torch.optim.lr_scheduler, name)

    raise NotImplementedError(f"Scheduler {name} is not yet supported.")
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
def get_schedulers() -> dict[str, str]:
    """
    Return the list of all schedulers available in torch.optim.lr_scheduler.

    Returns
    -------
    dict
        Schedulers available in torch.optim.lr_scheduler.
    """
    available: dict[str, str] = {}

    for name, candidate in inspect.getmembers(torch.optim.lr_scheduler):
        is_scheduler_class = inspect.isclass(candidate) and issubclass(
            candidate, torch.optim.lr_scheduler.LRScheduler
        )
        if is_scheduler_class:
            # Drop base classes such as `LRScheduler` itself.
            if "LRScheduler" not in name:
                available[name] = name
        elif name == "ReduceLROnPlateau":
            # Historically not a subclass of LRScheduler.
            available[name] = name

    return available
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"""Version utility."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
|
|
5
|
+
from careamics import __version__
|
|
6
|
+
|
|
7
|
+
logger = logging.getLogger(__name__)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def get_careamics_version() -> str:
    """Get clean CAREamics version.

    This method returns the latest `Major.Minor.Patch` version of CAREamics, removing
    any local version identifier.

    Returns
    -------
    str
        Clean CAREamics version.
    """
    version_parts = __version__.split(".")

    # Local installs that do not detect the latest versions via tags
    # (typically CI installs `0.X.devX<hash>.<other hash>` versions): the patch
    # number does not map to a released version, so wildcard it and warn.
    if "dev" in version_parts[2]:
        version_parts[2] = "*"
        recorded_version = ".".join(version_parts[:3])

        logger.warning(
            f"Your CAREamics version seems to be a locally modified version "
            f"({__version__}). The recorded version for loading models will be "
            f"{recorded_version}, which may not exist. If you want to ensure "
            f"exporting the model with an existing version, please install the "
            f"closest CAREamics version from PyPI or conda-forge."
        )

    # Drop any local version identifier, keeping only Major.Minor.Patch.
    return ".".join(version_parts[:3])
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: careamics
|
|
3
|
+
Version: 0.0.19
|
|
4
|
+
Summary: Toolbox for running N2V and friends.
|
|
5
|
+
Project-URL: homepage, https://careamics.github.io/
|
|
6
|
+
Project-URL: repository, https://github.com/CAREamics/careamics
|
|
7
|
+
Author-email: CAREamics team <rse@fht.org>, Ashesh <ashesh.ashesh@fht.org>, Federico Carrara <federico.carrara@fht.org>, Melisande Croft <melisande.croft@fht.org>, Joran Deschamps <joran.deschamps@fht.org>, Vera Galinova <vera.galinova@fht.org>, Igor Zubarev <igor.zubarev@fht.org>
|
|
8
|
+
License: BSD-3-Clause
|
|
9
|
+
License-File: LICENSE
|
|
10
|
+
Classifier: Development Status :: 3 - Alpha
|
|
11
|
+
Classifier: License :: OSI Approved :: BSD License
|
|
12
|
+
Classifier: Programming Language :: Python :: 3
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
16
|
+
Classifier: Typing :: Typed
|
|
17
|
+
Requires-Python: >=3.11
|
|
18
|
+
Requires-Dist: bioimageio-core>=0.9.0
|
|
19
|
+
Requires-Dist: matplotlib<=3.10.8
|
|
20
|
+
Requires-Dist: numpy>=1.21
|
|
21
|
+
Requires-Dist: numpy>=2.1.0; python_version >= '3.13'
|
|
22
|
+
Requires-Dist: pillow<=12.1.0
|
|
23
|
+
Requires-Dist: psutil<=7.2.1
|
|
24
|
+
Requires-Dist: pydantic<=2.12.5,>=2.11
|
|
25
|
+
Requires-Dist: pytorch-lightning<=2.6.0,>=2.2
|
|
26
|
+
Requires-Dist: pyyaml!=6.0.0,<=6.0.3
|
|
27
|
+
Requires-Dist: scikit-image<=0.26.0
|
|
28
|
+
Requires-Dist: tifffile<=2025.12.20
|
|
29
|
+
Requires-Dist: torch<=2.9.1,>=2.0
|
|
30
|
+
Requires-Dist: torchmetrics<1.5.0,>=0.11.0
|
|
31
|
+
Requires-Dist: torchvision<=0.24.1
|
|
32
|
+
Requires-Dist: typer<=0.21.1,>=0.12.3
|
|
33
|
+
Requires-Dist: validators<=0.35.0
|
|
34
|
+
Requires-Dist: zarr<4.0.0,>=3.0.0
|
|
35
|
+
Provides-Extra: czi
|
|
36
|
+
Requires-Dist: pylibczirw<6.0.0,>=4.1.2; extra == 'czi'
|
|
37
|
+
Provides-Extra: examples
|
|
38
|
+
Requires-Dist: careamics-portfolio; extra == 'examples'
|
|
39
|
+
Requires-Dist: jupyter; extra == 'examples'
|
|
40
|
+
Provides-Extra: tensorboard
|
|
41
|
+
Requires-Dist: protobuf==5.29.1; extra == 'tensorboard'
|
|
42
|
+
Requires-Dist: tensorboard; extra == 'tensorboard'
|
|
43
|
+
Provides-Extra: wandb
|
|
44
|
+
Requires-Dist: wandb; extra == 'wandb'
|
|
45
|
+
Description-Content-Type: text/markdown
|
|
46
|
+
|
|
47
|
+
<p align="center">
|
|
48
|
+
<a href="https://careamics.github.io/">
|
|
49
|
+
<img src="https://raw.githubusercontent.com/CAREamics/.github/main/profile/images/banner_careamics.png">
|
|
50
|
+
</a>
|
|
51
|
+
</p>
|
|
52
|
+
|
|
53
|
+
# CAREamics
|
|
54
|
+
|
|
55
|
+
[](https://github.com/CAREamics/careamics/blob/main/LICENSE)
|
|
56
|
+
[](https://pypi.org/project/careamics)
|
|
57
|
+
[](https://python.org)
|
|
58
|
+
[](https://github.com/CAREamics/careamics/actions/workflows/ci.yml)
|
|
59
|
+
[](https://codecov.io/gh/CAREamics/careamics)
|
|
60
|
+
[](https://forum.image.sc/)
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
CAREamics is a PyTorch library aimed at simplifying the use of Noise2Void and its many
|
|
64
|
+
variants and cousins (CARE, Noise2Noise, N2V2, P(P)N2V, HDN, muSplit etc.).
|
|
65
|
+
|
|
66
|
+
## Why CAREamics?
|
|
67
|
+
|
|
68
|
+
Noise2Void is a widely used denoising algorithm, and is readily available from the `n2v`
|
|
69
|
+
python package. However, `n2v` is based on TensorFlow, while more recent
|
|
70
|
+
denoising methods (PPN2V, DivNoising, HDN) are all implemented in PyTorch, but are
|
|
71
|
+
lacking the extra features that would make them usable by the community.
|
|
72
|
+
|
|
73
|
+
The aim of CAREamics is to provide a PyTorch library reuniting all the latest methods
|
|
74
|
+
in one package, while providing a simple and consistent API. The library relies on
|
|
75
|
+
PyTorch Lightning as a back-end. In addition, we will provide extensive documentation and
|
|
76
|
+
tutorials on how to best apply these methods in a scientific context.
|
|
77
|
+
|
|
78
|
+
## Installation and use
|
|
79
|
+
|
|
80
|
+
Check out the [documentation](https://careamics.github.io/) for installation instructions and guides!
|