careamics 0.1.0rc3.tar.gz → 0.1.0rc5.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (187)
  1. careamics-0.1.0rc5/.github/pull_request_template.md +39 -0
  2. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/.pre-commit-config.yaml +7 -7
  3. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/PKG-INFO +16 -61
  4. careamics-0.1.0rc5/README.md +34 -0
  5. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/pyproject.toml +0 -1
  6. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/__init__.py +8 -6
  7. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/careamist.py +30 -29
  8. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/__init__.py +12 -9
  9. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/algorithm_model.py +5 -5
  10. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/architectures/unet_model.py +1 -0
  11. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/callback_model.py +1 -0
  12. careamics-0.1.0rc5/src/careamics/config/configuration_example.py +87 -0
  13. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/configuration_factory.py +285 -78
  14. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/configuration_model.py +22 -23
  15. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/data_model.py +62 -160
  16. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/inference_model.py +20 -21
  17. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/references/algorithm_descriptions.py +1 -0
  18. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/references/references.py +1 -0
  19. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/support/supported_extraction_strategies.py +1 -0
  20. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/support/supported_optimizers.py +3 -3
  21. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/training_model.py +2 -1
  22. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/transformations/n2v_manipulate_model.py +2 -1
  23. careamics-0.1.0rc5/src/careamics/config/transformations/nd_flip_model.py +27 -0
  24. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/transformations/normalize_model.py +2 -1
  25. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/transformations/transform_model.py +1 -0
  26. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/transformations/xy_random_rotate90_model.py +7 -9
  27. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/validators/validator_utils.py +1 -0
  28. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/conftest.py +1 -0
  29. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/dataset_utils/__init__.py +0 -1
  30. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/dataset_utils/dataset_utils.py +1 -0
  31. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/in_memory_dataset.py +17 -48
  32. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/iterable_dataset.py +16 -71
  33. careamics-0.1.0rc5/src/careamics/dataset/patching/__init__.py +1 -0
  34. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/patching/patching.py +1 -0
  35. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/patching/sequential_patching.py +6 -6
  36. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/patching/tiled_patching.py +10 -6
  37. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/lightning_datamodule.py +123 -49
  38. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/lightning_module.py +7 -7
  39. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/lightning_prediction_datamodule.py +59 -48
  40. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/losses/__init__.py +0 -1
  41. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/losses/loss_factory.py +1 -0
  42. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/model_io/__init__.py +0 -1
  43. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/model_io/bioimage/_readme_factory.py +2 -1
  44. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/model_io/bioimage/bioimage_utils.py +1 -0
  45. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/model_io/bioimage/model_description.py +4 -3
  46. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/model_io/bmz_io.py +8 -7
  47. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/model_io/model_io_utils.py +4 -4
  48. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/models/layers.py +1 -0
  49. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/models/model_factory.py +1 -0
  50. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/models/unet.py +91 -17
  51. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/prediction/stitch_prediction.py +1 -0
  52. careamics-0.1.0rc5/src/careamics/transforms/__init__.py +20 -0
  53. careamics-0.1.0rc5/src/careamics/transforms/compose.py +98 -0
  54. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/transforms/n2v_manipulate.py +18 -23
  55. careamics-0.1.0rc5/src/careamics/transforms/nd_flip.py +67 -0
  56. careamics-0.1.0rc5/src/careamics/transforms/normalize.py +120 -0
  57. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/transforms/pixel_manipulation.py +2 -2
  58. careamics-0.1.0rc5/src/careamics/transforms/transform.py +33 -0
  59. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/transforms/tta.py +2 -2
  60. careamics-0.1.0rc5/src/careamics/transforms/xy_random_rotate90.py +68 -0
  61. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/utils/__init__.py +0 -1
  62. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/utils/context.py +1 -0
  63. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/utils/logging.py +1 -0
  64. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/utils/metrics.py +1 -0
  65. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/utils/torch_utils.py +1 -0
  66. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/test_algorithm_model.py +8 -8
  67. careamics-0.1.0rc5/tests/config/test_configuration_factory.py +596 -0
  68. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/test_data_model.py +28 -75
  69. careamics-0.1.0rc5/tests/config/test_full_config_example.py +6 -0
  70. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/test_inference_model.py +19 -31
  71. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/test_optimizers_model.py +6 -6
  72. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/test_training_model.py +2 -2
  73. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/dataset/patching/test_sequential_patching.py +17 -0
  74. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/dataset/test_in_memory_dataset.py +4 -4
  75. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/dataset/test_iterable_dataset.py +4 -4
  76. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/models/test_unet.py +40 -0
  77. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/test_careamist.py +5 -1
  78. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/test_conftest.py +8 -8
  79. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/test_lightning_datamodule.py +38 -61
  80. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/test_lightning_module.py +62 -24
  81. careamics-0.1.0rc5/tests/test_lightning_prediction_datamodule.py +80 -0
  82. careamics-0.1.0rc5/tests/transforms/test_compose.py +88 -0
  83. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/transforms/test_manipulate_n2v.py +5 -9
  84. careamics-0.1.0rc5/tests/transforms/test_nd_flip.py +60 -0
  85. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/transforms/test_normalize.py +3 -3
  86. careamics-0.1.0rc5/tests/transforms/test_xy_random_rotate90.py +73 -0
  87. careamics-0.1.0rc3/.github/PR_TEMPLATE/pull_request.md +0 -22
  88. careamics-0.1.0rc3/README.md +0 -78
  89. careamics-0.1.0rc3/src/careamics/config/transformations/nd_flip_model.py +0 -32
  90. careamics-0.1.0rc3/src/careamics/dataset/patching/__init__.py +0 -8
  91. careamics-0.1.0rc3/src/careamics/dataset/patching/patch_transform.py +0 -44
  92. careamics-0.1.0rc3/src/careamics/transforms/__init__.py +0 -41
  93. careamics-0.1.0rc3/src/careamics/transforms/nd_flip.py +0 -93
  94. careamics-0.1.0rc3/src/careamics/transforms/normalize.py +0 -109
  95. careamics-0.1.0rc3/src/careamics/transforms/xy_random_rotate90.py +0 -95
  96. careamics-0.1.0rc3/tests/config/test_configuration_factory.py +0 -222
  97. careamics-0.1.0rc3/tests/transforms/test_nd_flip.py +0 -129
  98. careamics-0.1.0rc3/tests/transforms/test_xy_random_rotate90.py +0 -117
  99. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
  100. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
  101. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/.github/workflows/ci.yml +0 -0
  102. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/.gitignore +0 -0
  103. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/LICENSE +0 -0
  104. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/examples/2D/n2n/example_SEM_careamist.ipynb +0 -0
  105. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/examples/2D/n2n/n2n_2D_SEM.yml +0 -0
  106. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/examples/2D/n2v/example_BSD68_careamist.ipynb +0 -0
  107. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/examples/2D/n2v/example_BSD68_lightning.ipynb +0 -0
  108. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/examples/2D/n2v/example_SEM_lightning.ipynb +0 -0
  109. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/examples/2D/n2v/n2v_2D_BSD.yml +0 -0
  110. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/examples/2D/pn2v/pN2V_Convallaria.yml +0 -0
  111. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/examples/3D/example_flywing_3D.ipynb +0 -0
  112. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/examples/3D/n2v_flywing_3D.yml +0 -0
  113. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/callbacks/__init__.py +0 -0
  114. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/callbacks/hyperparameters_callback.py +0 -0
  115. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/callbacks/progress_bar_callback.py +0 -0
  116. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/architectures/__init__.py +0 -0
  117. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/architectures/architecture_model.py +0 -0
  118. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/architectures/custom_model.py +0 -0
  119. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/architectures/register_model.py +0 -0
  120. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/architectures/vae_model.py +0 -0
  121. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/noise_models.py +0 -0
  122. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/optimizer_models.py +0 -0
  123. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/references/__init__.py +0 -0
  124. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/support/__init__.py +0 -0
  125. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/support/supported_activations.py +0 -0
  126. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/support/supported_algorithms.py +0 -0
  127. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/support/supported_architectures.py +0 -0
  128. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/support/supported_data.py +0 -0
  129. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/support/supported_loggers.py +0 -0
  130. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/support/supported_losses.py +0 -0
  131. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/support/supported_pixel_manipulations.py +0 -0
  132. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/support/supported_struct_axis.py +0 -0
  133. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/support/supported_transforms.py +0 -0
  134. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/tile_information.py +0 -0
  135. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/transformations/__init__.py +0 -0
  136. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/validators/__init__.py +0 -0
  137. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/__init__.py +0 -0
  138. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/dataset_utils/file_utils.py +0 -0
  139. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/dataset_utils/read_tiff.py +0 -0
  140. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/dataset_utils/read_utils.py +0 -0
  141. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/dataset_utils/read_zarr.py +0 -0
  142. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/patching/random_patching.py +0 -0
  143. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/patching/validate_patch_dimension.py +0 -0
  144. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/dataset/zarr_dataset.py +0 -0
  145. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/lightning_prediction_loop.py +0 -0
  146. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/losses/losses.py +0 -0
  147. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/losses/noise_model_factory.py +0 -0
  148. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/losses/noise_models.py +0 -0
  149. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/model_io/bioimage/__init__.py +0 -0
  150. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/models/__init__.py +0 -0
  151. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/models/activation.py +0 -0
  152. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/prediction/__init__.py +0 -0
  153. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/py.typed +0 -0
  154. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/transforms/struct_mask_parameters.py +0 -0
  155. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/utils/base_enum.py +0 -0
  156. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/utils/path_utils.py +0 -0
  157. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/utils/ram.py +0 -0
  158. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/utils/receptive_field.py +0 -0
  159. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/utils/running_stats.py +0 -0
  160. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/architectures/test_architecture_model.py +0 -0
  161. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/architectures/test_custom_model.py +0 -0
  162. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/architectures/test_register_model.py +0 -0
  163. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/architectures/test_unet_model.py +0 -0
  164. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/support/test_supported_data.py +0 -0
  165. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/support/test_supported_optimizers.py +0 -0
  166. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/test_configuration_model.py +0 -0
  167. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/test_tile_information.py +0 -0
  168. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/transformations/test_n2v_manipulate_model.py +0 -0
  169. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/transformations/test_normalize_model.py +0 -0
  170. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/config/validators/test_validator_utils.py +0 -0
  171. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/conftest.py +0 -0
  172. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/dataset/dataset_utils/test_list_files.py +0 -0
  173. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/dataset/dataset_utils/test_read_tiff.py +0 -0
  174. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/dataset/patching/test_patching_utils.py +0 -0
  175. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/dataset/patching/test_random_patching.py +0 -0
  176. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/dataset/patching/test_tiled_patching.py +0 -0
  177. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/model_io/test_bmz_io.py +0 -0
  178. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/models/test_model_factory.py +0 -0
  179. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/prediction/test_stitch_prediction.py +0 -0
  180. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/transforms/test_pixel_manipulation.py +0 -0
  181. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/transforms/test_supported_transforms.py +0 -0
  182. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/utils/test_base_enum.py +0 -0
  183. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/utils/test_context.py +0 -0
  184. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/utils/test_logging.py +0 -0
  185. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/utils/test_metrics.py +0 -0
  186. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/utils/test_torch_utils.py +0 -0
  187. {careamics-0.1.0rc3 → careamics-0.1.0rc5}/tests/utils/test_wandb.py +0 -0
careamics-0.1.0rc5/.github/pull_request_template.md
@@ -0,0 +1,39 @@
+ ### Description
+
+ Please provide a brief description of the changes in this PR. Include any relevant context or background information.
+
+ - **What**: Clearly and concisely describe what changes you have made.
+ - **Why**: Explain the reasoning behind these changes. What problem are you solving? Why is this change necessary?
+ - **How**: Describe how you implemented these changes. Provide an overview of the approach and any important implementation details.
+
+ ### Changes Made
+
+ - **Added**: List new features or files added.
+ - **Modified**: Describe existing features or files modified.
+ - **Removed**: Detail features or files that were removed.
+
+ ### Related Issues
+
+ Link to any related issues or discussions. Use keywords like "Fixes", "Resolves", or "Closes" to link to issues automatically.
+
+ - Fixes #
+ - Resolves #
+ - Closes #
+
+ ### Breaking changes
+
+ Describe any breaking change.
+
+
+ ### Additional Notes and Examples
+
+ Include any additional notes or context that reviewers should be aware of, including snippets of code illustrating your new feature.
+
+ ---
+
+ **Please ensure your PR meets the following requirements:**
+
+ - [ ] Code builds and passes tests locally, including doctests
+ - [ ] New tests have been added (for bug fixes/features)
+ - [ ] Pre-commit passes
+ - [ ] PR to the documentation exists (for bug fixes / features)
{careamics-0.1.0rc3 → careamics-0.1.0rc5}/.pre-commit-config.yaml
@@ -9,23 +9,23 @@ ci:

  repos:
  - repo: https://github.com/abravalheri/validate-pyproject
- rev: v0.14
+ rev: v0.16
  hooks:
  - id: validate-pyproject

  - repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.0.292
+ rev: v0.4.3
  hooks:
  - id: ruff
  args: [--fix, --target-version, py38]

  - repo: https://github.com/psf/black
- rev: 23.9.1
+ rev: 24.4.2
  hooks:
  - id: black

  - repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.5.1
+ rev: v1.10.0
  hooks:
  - id: mypy
  files: "^src/"
@@ -36,13 +36,13 @@ repos:

  # check docstrings
  - repo: https://github.com/numpy/numpydoc
- rev: v1.6.0
+ rev: v1.7.0
  hooks:
  - id: numpydoc-validation

  # jupyter linting and formatting
  - repo: https://github.com/nbQA-dev/nbQA
- rev: 1.7.0
+ rev: 1.8.5
  hooks:
  - id: nbqa-ruff
  args: [--fix]
@@ -51,6 +51,6 @@ repos:

  # strip out jupyter notebooks
  - repo: https://github.com/kynan/nbstripout
- rev: 0.6.1
+ rev: 0.7.1
  hooks:
  - id: nbstripout
{careamics-0.1.0rc3 → careamics-0.1.0rc5}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: careamics
- Version: 0.1.0rc3
+ Version: 0.1.0rc5
  Summary: Toolbox for running N2V and friends.
  Project-URL: homepage, https://careamics.github.io/
  Project-URL: repository, https://github.com/CAREamics/careamics
@@ -16,7 +16,6 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Typing :: Typed
  Requires-Python: >=3.8
- Requires-Dist: albumentations
  Requires-Dist: bioimageio-core>=0.6.0
  Requires-Dist: psutil
  Requires-Dist: pydantic>=2.5
@@ -48,7 +47,7 @@ Description-Content-Type: text/markdown
  </a>
  </p>

- # CAREamics Restoration
+ # CAREamics
  [![License](https://img.shields.io/pypi/l/careamics.svg?color=green)](https://github.com/CAREamics/careamics/blob/main/LICENSE)
  [![PyPI](https://img.shields.io/pypi/v/careamics.svg?color=green)](https://pypi.org/project/careamics)
  [![Python Version](https://img.shields.io/pypi/pyversions/careamics.svg?color=green)](https://python.org)
@@ -56,67 +55,23 @@ Description-Content-Type: text/markdown
  [![CI](https://github.com/CAREamics/careamics/actions/workflows/ci.yml/badge.svg)](https://github.com/CAREamics/careamics/actions/workflows/ci.yml)
  [![codecov](https://codecov.io/gh/CAREamics/careamics/branch/main/graph/badge.svg)](https://codecov.io/gh/CAREamics/careamics)

- ## Installation

- ``` bash
- pip install careamics
- ```
- For more details on the options please follow the installation [guide](https://careamics.github.io/careamics/).
+ CAREamics is a PyTorch library aimed at simplifying the use of Noise2Void and its many
+ variants and cousins (CARE, Noise2Noise, N2V2, P(P)N2V, HDN, muSplit etc.).

- ## Usage
+ ## Why CAREamics?

- CAREamics uses the Engine object to construct the pipeline for both training and prediction. First we import the Engine.
- ```python
- from careamics_restoration.engine import Engine
- ```
- The Engine could be initialized in 2 ways:
- 1. Using the [yaml config](examples/n2v_2D_reference.yml) file
+ Noise2Void is a widely used denoising algorithm, and is readily available from the `n2v`
+ python package. However, n2v is based on TensorFlow and Keras and we found it
+ increasingly hard to maintain. In addition, more recent methods (PPN2V, DivNoising,
+ HDN) are all implemented in PyTorch, but are lacking the extra features that would make
+ them usable by the community.

- Specify the mandatory parameters in the config file
- ```yaml
- experiment_name: Name of the experiment
- working_directory: Path to the working directory, where all the outputs will be stored
+ The aim of CAREamics is to provide a PyTorch library reuniting all the latest methods
+ in one package, while providing a simple and consistent API. The library relies on
+ PyTorch Lightning as a back-end. In addition, we will provide extensive documentation and
+ tutorials on how to best apply these methods in a scientific context.

- algorithm:
- loss: type of loss function, e.g. n2v for Noise2Void
- model: model architecture, e.g. UNet
- is_3D: True if 3D data, False if 2D data
-
- training:
- num_epochs: Number of training epochs
- patch_size: Size of the patches, List of 2 or 3 elements
- batch_size: Batch size for training
-
- extraction_strategy: Controls how the patches are extracted from the data
-
- data:
- data_format: File extension, e.g. tif
- axes: Defines the shape of the input data
- ```
- Full description of the configuration parameters is in the [documentation](https://careamics.github.io/careamics/).
-
-
- ```python
- engine = Engine(config_path="config.yml")
-
- ```
- 2. Using the path to the pretrained model
- It's also possible to initialize the Engine using the model checkpoint, saved during the training or downloaded from the [BioImage Model Zoo](https://bioimage.io/#/).
- Checkpoint must contain model_state_dict.
- Read more abount saving and loading models in the [documentation](https://careamics.github.io/careamics/).
-
- Once Engine is initialized, we can start training, providing the relative paths to train and validation data
-
- ```python
- engine.train(train_path=train_path, val_path=val_path)
- ```
- Training will run for the specified number of epochs and save the model checkpoint in the working directory.
-
- Prediction could be done directly after the training or by loading the pretrained model checkpoint.
-
- ```python
- predictions = engine.predict(pred_path=predict_path)
- ```
-
- For more examples please take a look at the [notebooks](examples).
+ ## Installation and use

+ Check out the [documentation](https://careamics.github.io/) for installation instructions and guides!
careamics-0.1.0rc5/README.md
@@ -0,0 +1,34 @@
+ <p align="center">
+ <a href="https://careamics.github.io/">
+ <img src="https://raw.githubusercontent.com/CAREamics/.github/main/profile/images/banner_careamics.png">
+ </a>
+ </p>
+
+ # CAREamics
+
+ [![License](https://img.shields.io/pypi/l/careamics.svg?color=green)](https://github.com/CAREamics/careamics/blob/main/LICENSE)
+ [![PyPI](https://img.shields.io/pypi/v/careamics.svg?color=green)](https://pypi.org/project/careamics)
+ [![Python Version](https://img.shields.io/pypi/pyversions/careamics.svg?color=green)](https://python.org)
+ [![CI](https://github.com/CAREamics/careamics/actions/workflows/ci.yml/badge.svg)](https://github.com/CAREamics/careamics/actions/workflows/ci.yml)
+ [![codecov](https://codecov.io/gh/CAREamics/careamics/branch/main/graph/badge.svg)](https://codecov.io/gh/CAREamics/careamics)
+
+
+ CAREamics is a PyTorch library aimed at simplifying the use of Noise2Void and its many
+ variants and cousins (CARE, Noise2Noise, N2V2, P(P)N2V, HDN, muSplit etc.).
+
+ ## Why CAREamics?
+
+ Noise2Void is a widely used denoising algorithm, and is readily available from the `n2v`
+ python package. However, n2v is based on TensorFlow and Keras and we found it
+ increasingly hard to maintain. In addition, more recent methods (PPN2V, DivNoising,
+ HDN) are all implemented in PyTorch, but are lacking the extra features that would make
+ them usable by the community.
+
+ The aim of CAREamics is to provide a PyTorch library reuniting all the latest methods
+ in one package, while providing a simple and consistent API. The library relies on
+ PyTorch Lightning as a back-end. In addition, we will provide extensive documentation and
+ tutorials on how to best apply these methods in a scientific context.
+
+ ## Installation and use
+
+ Check out the [documentation](https://careamics.github.io/) for installation instructions and guides!
{careamics-0.1.0rc3 → careamics-0.1.0rc5}/pyproject.toml
@@ -38,7 +38,6 @@ classifiers = [
  ]
  dependencies = [
  'torch>=2.0.0',
- 'albumentations',
  'bioimageio.core>=0.6.0',
  'tifffile',
  'psutil',
{careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/__init__.py
@@ -9,16 +9,18 @@ except PackageNotFoundError:

  __all__ = [
  "CAREamist",
- "CAREamicsModule",
+ "CAREamicsModuleWrapper",
+ "CAREamicsPredictData",
+ "CAREamicsTrainData",
  "Configuration",
  "load_configuration",
  "save_configuration",
- "CAREamicsTrainDataModule",
- "CAREamicsPredictDataModule",
+ "TrainingDataWrapper",
+ "PredictDataWrapper",
  ]

  from .careamist import CAREamist
  from .config import Configuration, load_configuration, save_configuration
- from .lightning_datamodule import CAREamicsTrainDataModule
- from .lightning_module import CAREamicsModule
- from .lightning_prediction_datamodule import CAREamicsPredictDataModule
+ from .lightning_datamodule import CAREamicsTrainData, TrainingDataWrapper
+ from .lightning_module import CAREamicsModuleWrapper
+ from .lightning_prediction_datamodule import CAREamicsPredictData, PredictDataWrapper
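
For orientation, the renamed top-level exports can be imported as below. This is only a sketch of the new public names listed in the `__all__` above, not an excerpt from the release itself.

```python
# Names taken from the updated __all__ above; purely illustrative.
from careamics import (
    CAREamist,
    CAREamicsModuleWrapper,
    CAREamicsPredictData,
    CAREamicsTrainData,
    Configuration,
    PredictDataWrapper,
    TrainingDataWrapper,
    load_configuration,
    save_configuration,
)
```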
{careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/careamist.py
@@ -20,9 +20,9 @@ from careamics.config import (
  )
  from careamics.config.inference_model import TRANSFORMS_UNION
  from careamics.config.support import SupportedAlgorithm, SupportedData, SupportedLogger
- from careamics.lightning_datamodule import CAREamicsWood
- from careamics.lightning_module import CAREamicsKiln
- from careamics.lightning_prediction_datamodule import CAREamicsClay
+ from careamics.lightning_datamodule import CAREamicsTrainData
+ from careamics.lightning_module import CAREamicsModule
+ from careamics.lightning_prediction_datamodule import CAREamicsPredictData
  from careamics.lightning_prediction_loop import CAREamicsPredictionLoop
  from careamics.model_io import export_to_bmz, load_pretrained
  from careamics.utils import check_path_exists, get_logger
@@ -73,8 +73,7 @@ class CAREamist:
  source: Union[Path, str],
  work_dir: Optional[str] = None,
  experiment_name: str = "CAREamics",
- ) -> None:
- ...
+ ) -> None: ...

  @overload
  def __init__( # numpydoc ignore=GL08
@@ -82,8 +81,7 @@ class CAREamist:
  source: Configuration,
  work_dir: Optional[str] = None,
  experiment_name: str = "CAREamics",
- ) -> None:
- ...
+ ) -> None: ...

  def __init__(
  self,
@@ -140,7 +138,7 @@ class CAREamist:
  self.cfg = source

  # instantiate model
- self.model = CAREamicsKiln(
+ self.model = CAREamicsModule(
  algorithm_config=self.cfg.algorithm_config,
  )

@@ -156,7 +154,7 @@ class CAREamist:
  self.cfg = load_configuration(source)

  # instantiate model
- self.model = CAREamicsKiln(
+ self.model = CAREamicsModule(
  algorithm_config=self.cfg.algorithm_config,
  )

@@ -193,8 +191,8 @@ class CAREamist:
  self.trainer.predict_loop = CAREamicsPredictionLoop(self.trainer)

  # place holder for the datamodules
- self.train_datamodule: Optional[CAREamicsWood] = None
- self.pred_datamodule: Optional[CAREamicsClay] = None
+ self.train_datamodule: Optional[CAREamicsTrainData] = None
+ self.pred_datamodule: Optional[CAREamicsPredictData] = None

  def _define_callbacks(self) -> List[Callback]:
  """
@@ -227,7 +225,7 @@ class CAREamist:
  def train(
  self,
  *,
- datamodule: Optional[CAREamicsWood] = None,
+ datamodule: Optional[CAREamicsTrainData] = None,
  train_source: Optional[Union[Path, str, np.ndarray]] = None,
  val_source: Optional[Union[Path, str, np.ndarray]] = None,
  train_target: Optional[Union[Path, str, np.ndarray]] = None,
@@ -360,7 +358,7 @@ class CAREamist:
  f"instance (got {type(train_source)})."
  )

- def _train_on_datamodule(self, datamodule: CAREamicsWood) -> None:
+ def _train_on_datamodule(self, datamodule: CAREamicsTrainData) -> None:
  """
  Train the model on the provided datamodule.

@@ -402,7 +400,7 @@ class CAREamist:
  Minimum number of patches to use for validation, by default 5.
  """
  # create datamodule
- datamodule = CAREamicsWood(
+ datamodule = CAREamicsTrainData(
  data_config=self.cfg.data_config,
  train_data=train_data,
  val_data=val_data,
@@ -458,7 +456,7 @@ class CAREamist:
  path_to_val_target = check_path_exists(path_to_val_target)

  # create datamodule
- datamodule = CAREamicsWood(
+ datamodule = CAREamicsTrainData(
  data_config=self.cfg.data_config,
  train_data=path_to_train_data,
  val_data=path_to_val_data,
@@ -475,11 +473,10 @@ class CAREamist:
  @overload
  def predict( # numpydoc ignore=GL08
  self,
- source: CAREamicsClay,
+ source: CAREamicsPredictData,
  *,
  checkpoint: Optional[Literal["best", "last"]] = None,
- ) -> Union[list, np.ndarray]:
- ...
+ ) -> Union[list, np.ndarray]: ...

  @overload
  def predict( # numpydoc ignore=GL08
@@ -497,8 +494,7 @@ class CAREamist:
  read_source_func: Optional[Callable] = None,
  extension_filter: str = "",
  checkpoint: Optional[Literal["best", "last"]] = None,
- ) -> Union[list, np.ndarray]:
- ...
+ ) -> Union[list, np.ndarray]: ...

  @overload
  def predict( # numpydoc ignore=GL08
@@ -514,12 +510,11 @@ class CAREamist:
  tta_transforms: bool = True,
  dataloader_params: Optional[Dict] = None,
  checkpoint: Optional[Literal["best", "last"]] = None,
- ) -> Union[list, np.ndarray]:
- ...
+ ) -> Union[list, np.ndarray]: ...

  def predict(
  self,
- source: Union[CAREamicsClay, Path, str, np.ndarray],
+ source: Union[CAREamicsPredictData, Path, str, np.ndarray],
  *,
  batch_size: int = 1,
  tile_size: Optional[Tuple[int, ...]] = None,
@@ -548,6 +543,12 @@ class CAREamist:
  Test-time augmentation (TTA) can be switched off using the `tta_transforms`
  parameter.

+ Note that if you are using a UNet model and tiling, the tile size must be
+ divisible in every dimension by 2**d, where d is the depth of the model. This
+ avoids artefacts arising from the broken shift invariance induced by the
+ pooling layers of the UNet. If your image has less dimensions, as it may
+ happen in the Z dimension, consider padding your image.
+

  Parameters
  source : Union[CAREamicsClay, Path, str, np.ndarray]
@@ -587,7 +588,7 @@ class CAREamist:
  ValueError
  If the input is not a CAREamicsClay instance, a path or a numpy array.
  """
- if isinstance(source, CAREamicsClay):
+ if isinstance(source, CAREamicsPredictData):
  # record datamodule
  self.pred_datamodule = source

@@ -602,7 +603,7 @@ class CAREamist:
  )
  # create predict config, reuse training config if parameters missing
  prediction_config = create_inference_configuration(
- training_configuration=self.cfg,
+ configuration=self.cfg,
  tile_size=tile_size,
  tile_overlap=tile_overlap,
  data_type=data_type,
@@ -623,8 +624,8 @@ class CAREamist:
  source_path = check_path_exists(source)

  # create datamodule
- datamodule = CAREamicsClay(
- prediction_config=prediction_config,
+ datamodule = CAREamicsPredictData(
+ pred_config=prediction_config,
  pred_data=source_path,
  read_source_func=read_source_func,
  extension_filter=extension_filter,
@@ -640,8 +641,8 @@ class CAREamist:

  elif isinstance(source, np.ndarray):
  # create datamodule
- datamodule = CAREamicsClay(
- prediction_config=prediction_config,
+ datamodule = CAREamicsPredictData(
+ pred_config=prediction_config,
  pred_data=source,
  dataloader_params=dataloader_params,
  )
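
The renamed classes (CAREamicsTrainData, CAREamicsPredictData, CAREamicsModule) and the keyword-only `train`/`predict` signatures above suggest a workflow roughly like the following. This is a hedged sketch based only on the signatures visible in this diff; the config path, working directory and array shape are made-up placeholders.

```python
import numpy as np
from careamics import CAREamist

# "n2v_config.yml" and "runs" are hypothetical; any Configuration object or
# saved configuration path accepted by CAREamist would do.
careamist = CAREamist(source="n2v_config.yml", work_dir="runs", experiment_name="demo")

# train() is keyword-only and accepts a path, string or numpy array as source
train_image = np.random.rand(256, 256).astype(np.float32)
careamist.train(train_source=train_image)

# predict() accepts a CAREamicsPredictData instance, a path or a numpy array
prediction = careamist.predict(train_image, tta_transforms=True)
```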
{careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/__init__.py
@@ -1,16 +1,17 @@
  """Configuration module."""

-
  __all__ = [
- "AlgorithmModel",
- "DataModel",
+ "AlgorithmConfig",
+ "DataConfig",
  "Configuration",
  "CheckpointModel",
- "InferenceModel",
+ "InferenceConfig",
  "load_configuration",
  "save_configuration",
- "TrainingModel",
+ "TrainingConfig",
  "create_n2v_configuration",
+ "create_n2n_configuration",
+ "create_care_configuration",
  "register_model",
  "CustomModel",
  "create_inference_configuration",
@@ -18,11 +19,13 @@ __all__ = [
  "ConfigurationInformation",
  ]

- from .algorithm_model import AlgorithmModel
+ from .algorithm_model import AlgorithmConfig
  from .architectures import CustomModel, clear_custom_models, register_model
  from .callback_model import CheckpointModel
  from .configuration_factory import (
+ create_care_configuration,
  create_inference_configuration,
+ create_n2n_configuration,
  create_n2v_configuration,
  )
  from .configuration_model import (
@@ -30,6 +33,6 @@ from .configuration_model import (
  load_configuration,
  save_configuration,
  )
- from .data_model import DataModel
- from .inference_model import InferenceModel
- from .training_model import TrainingModel
+ from .data_model import DataConfig
+ from .inference_model import InferenceConfig
+ from .training_model import TrainingConfig
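
The two new factory helpers are exported alongside the existing N2V one. Their signatures are not part of this diff, so only the imports are shown here as a sketch.

```python
# Import names only, as declared in the updated __all__ above; the factories'
# parameters are not visible in this diff.
from careamics.config import (
    create_care_configuration,
    create_n2n_configuration,
    create_n2v_configuration,
)
```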
{careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/algorithm_model.py
@@ -10,7 +10,7 @@ from .architectures import CustomModel, UNetModel, VAEModel
  from .optimizer_models import LrSchedulerModel, OptimizerModel


- class AlgorithmModel(BaseModel):
+ class AlgorithmConfig(BaseModel):
  """Algorithm configuration.

  This Pydantic model validates the parameters governing the components of the
@@ -45,7 +45,7 @@ class AlgorithmModel(BaseModel):
  Examples
  --------
  Minimum example:
- >>> from careamics.config import AlgorithmModel
+ >>> from careamics.config import AlgorithmConfig
  >>> config_dict = {
  ... "algorithm": "n2v",
  ... "loss": "n2v",
@@ -53,11 +53,11 @@ class AlgorithmModel(BaseModel):
  ... "architecture": "UNet",
  ... }
  ... }
- >>> config = AlgorithmModel(**config_dict)
+ >>> config = AlgorithmConfig(**config_dict)

  Using a custom model:
  >>> from torch import nn, ones
- >>> from careamics.config import AlgorithmModel, register_model
+ >>> from careamics.config import AlgorithmConfig, register_model
  ...
  >>> @register_model(name="linear_model")
  ... class LinearModel(nn.Module):
@@ -80,7 +80,7 @@ class AlgorithmModel(BaseModel):
  ... "out_features": 5,
  ... }
  ... }
- >>> config = AlgorithmModel(**config_dict)
+ >>> config = AlgorithmConfig(**config_dict)
  """

  # Pydantic class configuration
{careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/architectures/unet_model.py
@@ -39,6 +39,7 @@ class UNetModel(ArchitectureModel):
  "None", "Sigmoid", "Softmax", "Tanh", "ReLU", "LeakyReLU"
  ] = Field(default="None", validate_default=True)
  n2v2: bool = Field(default=False, validate_default=True)
+ independent_channels: bool = Field(default=True, validate_default=True)

  @field_validator("num_channels_init")
  @classmethod
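
A minimal sketch of the new field on the Pydantic UNet model; the other field values are copied from `configuration_example.py` further down, and the import path is an assumption based on this package layout.

```python
from careamics.config.architectures import UNetModel  # assumed import path

# independent_channels is the field added in this hunk; it defaults to True.
model = UNetModel(
    architecture="UNet",
    in_channels=1,
    num_classes=1,
    depth=2,
    num_channels_init=32,
    n2v2=False,
    independent_channels=True,
)
```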
{careamics-0.1.0rc3 → careamics-0.1.0rc5}/src/careamics/config/callback_model.py
@@ -1,4 +1,5 @@
  """Checkpoint saving configuration."""
+
  from __future__ import annotations

  from datetime import timedelta
careamics-0.1.0rc5/src/careamics/config/configuration_example.py
@@ -0,0 +1,87 @@
+ from .algorithm_model import AlgorithmConfig
+ from .architectures import UNetModel
+ from .configuration_model import Configuration
+ from .data_model import DataConfig
+ from .optimizer_models import LrSchedulerModel, OptimizerModel
+ from .support import (
+ SupportedActivation,
+ SupportedAlgorithm,
+ SupportedArchitecture,
+ SupportedData,
+ SupportedLogger,
+ SupportedLoss,
+ SupportedOptimizer,
+ SupportedPixelManipulation,
+ SupportedScheduler,
+ SupportedTransform,
+ )
+ from .training_model import TrainingConfig
+
+
+ def full_configuration_example() -> Configuration:
+ """Returns a dictionnary representing a full configuration example.
+
+ Returns
+ -------
+ Configuration
+ Full configuration example.
+ """
+ experiment_name = "Full example"
+ algorithm_model = AlgorithmConfig(
+ algorithm=SupportedAlgorithm.N2V.value,
+ loss=SupportedLoss.N2V.value,
+ model=UNetModel(
+ architecture=SupportedArchitecture.UNET.value,
+ in_channels=1,
+ num_classes=1,
+ depth=2,
+ num_channels_init=32,
+ final_activation=SupportedActivation.NONE.value,
+ n2v2=True,
+ ),
+ optimizer=OptimizerModel(
+ name=SupportedOptimizer.ADAM.value, parameters={"lr": 0.0001}
+ ),
+ lr_scheduler=LrSchedulerModel(
+ name=SupportedScheduler.REDUCE_LR_ON_PLATEAU.value,
+ ),
+ )
+ data_model = DataConfig(
+ data_type=SupportedData.ARRAY.value,
+ patch_size=(256, 256),
+ batch_size=8,
+ axes="YX",
+ transforms=[
+ {
+ "name": SupportedTransform.NORMALIZE.value,
+ },
+ {
+ "name": SupportedTransform.NDFLIP.value,
+ },
+ {
+ "name": SupportedTransform.XY_RANDOM_ROTATE90.value,
+ },
+ {
+ "name": SupportedTransform.N2V_MANIPULATE.value,
+ "roi_size": 11,
+ "masked_pixel_percentage": 0.2,
+ "strategy": SupportedPixelManipulation.MEDIAN.value,
+ },
+ ],
+ mean=0.485,
+ std=0.229,
+ dataloader_params={
+ "num_workers": 4,
+ },
+ )
+ training_model = TrainingConfig(
+ num_epochs=30,
+ logger=SupportedLogger.WANDB.value,
+ )
+
+ return Configuration(
+ experiment_name=experiment_name,
+ algorithm_config=algorithm_model,
+ data_config=data_model,
+ training_config=training_model,
+ )
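
The new helper can be exercised directly; a short sketch, assuming the module is importable as `careamics.config.configuration_example`.

```python
from careamics.config.configuration_example import full_configuration_example

config = full_configuration_example()
print(config.experiment_name)   # "Full example", as set in the file above
print(type(config).__name__)    # Configuration
```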