careamics 0.1.0rc5__tar.gz → 0.1.0rc6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of careamics might be problematic.
Files changed (185)
  1. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/.github/workflows/ci.yml +1 -1
  2. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/.pre-commit-config.yaml +10 -10
  3. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/PKG-INFO +2 -1
  4. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/examples/2D/n2v/example_BSD68_lightning.ipynb +0 -8
  5. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/pyproject.toml +5 -5
  6. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/callbacks/hyperparameters_callback.py +10 -3
  7. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/callbacks/progress_bar_callback.py +37 -4
  8. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/careamist.py +80 -44
  9. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/algorithm_model.py +5 -3
  10. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/architectures/architecture_model.py +7 -0
  11. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/architectures/custom_model.py +8 -1
  12. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/architectures/register_model.py +3 -1
  13. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/architectures/unet_model.py +2 -0
  14. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/architectures/vae_model.py +2 -0
  15. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/callback_model.py +3 -15
  16. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/configuration_example.py +4 -2
  17. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/configuration_factory.py +4 -16
  18. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/data_model.py +10 -14
  19. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/inference_model.py +0 -65
  20. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/optimizer_models.py +4 -4
  21. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/support/__init__.py +0 -2
  22. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/support/supported_activations.py +2 -0
  23. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/support/supported_algorithms.py +3 -1
  24. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/support/supported_architectures.py +2 -0
  25. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/support/supported_data.py +2 -0
  26. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/support/supported_loggers.py +2 -0
  27. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/support/supported_losses.py +2 -0
  28. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/support/supported_optimizers.py +2 -0
  29. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/support/supported_pixel_manipulations.py +3 -3
  30. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/support/supported_struct_axis.py +2 -0
  31. careamics-0.1.0rc6/src/careamics/config/support/supported_transforms.py +12 -0
  32. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/tile_information.py +2 -0
  33. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/transformations/__init__.py +3 -2
  34. careamics-0.1.0rc6/src/careamics/config/transformations/xy_flip_model.py +43 -0
  35. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/transformations/xy_random_rotate90_model.py +11 -3
  36. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/conftest.py +12 -0
  37. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/dataset_utils/dataset_utils.py +4 -4
  38. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/dataset_utils/file_utils.py +4 -3
  39. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/dataset_utils/read_tiff.py +6 -2
  40. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/dataset_utils/read_utils.py +2 -0
  41. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/dataset_utils/read_zarr.py +11 -7
  42. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/in_memory_dataset.py +71 -32
  43. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/iterable_dataset.py +155 -68
  44. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/patching/patching.py +56 -15
  45. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/patching/random_patching.py +8 -2
  46. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/patching/sequential_patching.py +14 -8
  47. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/patching/tiled_patching.py +3 -1
  48. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/patching/validate_patch_dimension.py +2 -0
  49. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/zarr_dataset.py +2 -0
  50. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/lightning_datamodule.py +45 -19
  51. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/lightning_module.py +8 -2
  52. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/lightning_prediction_datamodule.py +3 -13
  53. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/lightning_prediction_loop.py +8 -6
  54. careamics-0.1.0rc6/src/careamics/losses/__init__.py +5 -0
  55. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/losses/loss_factory.py +1 -1
  56. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/losses/losses.py +11 -7
  57. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/model_io/bmz_io.py +3 -3
  58. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/models/activation.py +2 -0
  59. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/models/layers.py +121 -25
  60. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/models/model_factory.py +1 -1
  61. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/models/unet.py +35 -14
  62. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/prediction/stitch_prediction.py +2 -6
  63. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/transforms/__init__.py +2 -2
  64. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/transforms/compose.py +33 -7
  65. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/transforms/n2v_manipulate.py +49 -13
  66. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/transforms/normalize.py +55 -3
  67. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/transforms/pixel_manipulation.py +5 -5
  68. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/transforms/struct_mask_parameters.py +3 -1
  69. careamics-0.1.0rc6/src/careamics/transforms/transform.py +24 -0
  70. careamics-0.1.0rc6/src/careamics/transforms/xy_flip.py +123 -0
  71. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/transforms/xy_random_rotate90.py +38 -5
  72. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/utils/base_enum.py +28 -0
  73. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/utils/path_utils.py +2 -0
  74. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/utils/ram.py +2 -0
  75. careamics-0.1.0rc6/src/careamics/utils/receptive_field.py +108 -0
  76. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/test_configuration_factory.py +3 -4
  77. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/test_configuration_model.py +1 -1
  78. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/test_data_model.py +12 -12
  79. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/test_inference_model.py +0 -56
  80. careamics-0.1.0rc6/tests/config/transformations/test_n2v_manipulate_model.py +33 -0
  81. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/transformations/test_normalize_model.py +9 -0
  82. careamics-0.1.0rc6/tests/config/transformations/test_xy_flip_model.py +10 -0
  83. careamics-0.1.0rc6/tests/config/transformations/test_xy_random_rotate90_model.py +10 -0
  84. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/conftest.py +1 -1
  85. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/dataset/test_in_memory_dataset.py +3 -3
  86. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/test_careamist.py +175 -33
  87. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/transforms/test_compose.py +50 -7
  88. careamics-0.1.0rc6/tests/transforms/test_xy_flip.py +137 -0
  89. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/transforms/test_xy_random_rotate90.py +28 -5
  90. careamics-0.1.0rc5/src/careamics/config/noise_models.py +0 -162
  91. careamics-0.1.0rc5/src/careamics/config/support/supported_extraction_strategies.py +0 -25
  92. careamics-0.1.0rc5/src/careamics/config/support/supported_transforms.py +0 -23
  93. careamics-0.1.0rc5/src/careamics/config/transformations/nd_flip_model.py +0 -27
  94. careamics-0.1.0rc5/src/careamics/losses/__init__.py +0 -6
  95. careamics-0.1.0rc5/src/careamics/losses/noise_model_factory.py +0 -40
  96. careamics-0.1.0rc5/src/careamics/losses/noise_models.py +0 -524
  97. careamics-0.1.0rc5/src/careamics/transforms/nd_flip.py +0 -67
  98. careamics-0.1.0rc5/src/careamics/transforms/transform.py +0 -33
  99. careamics-0.1.0rc5/src/careamics/utils/receptive_field.py +0 -102
  100. careamics-0.1.0rc5/tests/config/transformations/test_n2v_manipulate_model.py +0 -26
  101. careamics-0.1.0rc5/tests/transforms/test_nd_flip.py +0 -60
  102. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
  103. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
  104. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/.github/pull_request_template.md +0 -0
  105. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/.gitignore +0 -0
  106. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/LICENSE +0 -0
  107. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/README.md +0 -0
  108. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/examples/2D/n2n/example_SEM_careamist.ipynb +0 -0
  109. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/examples/2D/n2n/n2n_2D_SEM.yml +0 -0
  110. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/examples/2D/n2v/example_BSD68_careamist.ipynb +0 -0
  111. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/examples/2D/n2v/example_SEM_lightning.ipynb +0 -0
  112. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/examples/2D/n2v/n2v_2D_BSD.yml +0 -0
  113. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/examples/2D/pn2v/pN2V_Convallaria.yml +0 -0
  114. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/examples/3D/example_flywing_3D.ipynb +0 -0
  115. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/examples/3D/n2v_flywing_3D.yml +0 -0
  116. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/__init__.py +0 -0
  117. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/callbacks/__init__.py +0 -0
  118. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/__init__.py +0 -0
  119. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/architectures/__init__.py +0 -0
  120. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/configuration_model.py +0 -0
  121. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/references/__init__.py +0 -0
  122. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/references/algorithm_descriptions.py +0 -0
  123. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/references/references.py +0 -0
  124. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/training_model.py +0 -0
  125. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/transformations/n2v_manipulate_model.py +0 -0
  126. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/transformations/normalize_model.py +0 -0
  127. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/transformations/transform_model.py +0 -0
  128. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/validators/__init__.py +0 -0
  129. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/config/validators/validator_utils.py +0 -0
  130. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/__init__.py +0 -0
  131. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/dataset_utils/__init__.py +0 -0
  132. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/dataset/patching/__init__.py +0 -0
  133. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/model_io/__init__.py +0 -0
  134. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/model_io/bioimage/__init__.py +0 -0
  135. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/model_io/bioimage/_readme_factory.py +0 -0
  136. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/model_io/bioimage/bioimage_utils.py +0 -0
  137. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/model_io/bioimage/model_description.py +0 -0
  138. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/model_io/model_io_utils.py +0 -0
  139. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/models/__init__.py +0 -0
  140. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/prediction/__init__.py +0 -0
  141. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/py.typed +0 -0
  142. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/transforms/tta.py +0 -0
  143. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/utils/__init__.py +0 -0
  144. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/utils/context.py +0 -0
  145. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/utils/logging.py +0 -0
  146. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/utils/metrics.py +0 -0
  147. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/utils/running_stats.py +0 -0
  148. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/src/careamics/utils/torch_utils.py +0 -0
  149. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/architectures/test_architecture_model.py +0 -0
  150. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/architectures/test_custom_model.py +0 -0
  151. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/architectures/test_register_model.py +0 -0
  152. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/architectures/test_unet_model.py +0 -0
  153. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/support/test_supported_data.py +0 -0
  154. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/support/test_supported_optimizers.py +0 -0
  155. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/test_algorithm_model.py +0 -0
  156. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/test_full_config_example.py +0 -0
  157. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/test_optimizers_model.py +0 -0
  158. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/test_tile_information.py +0 -0
  159. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/test_training_model.py +0 -0
  160. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/config/validators/test_validator_utils.py +0 -0
  161. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/dataset/dataset_utils/test_list_files.py +0 -0
  162. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/dataset/dataset_utils/test_read_tiff.py +0 -0
  163. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/dataset/patching/test_patching_utils.py +0 -0
  164. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/dataset/patching/test_random_patching.py +0 -0
  165. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/dataset/patching/test_sequential_patching.py +0 -0
  166. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/dataset/patching/test_tiled_patching.py +0 -0
  167. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/dataset/test_iterable_dataset.py +0 -0
  168. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/model_io/test_bmz_io.py +0 -0
  169. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/models/test_model_factory.py +0 -0
  170. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/models/test_unet.py +0 -0
  171. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/prediction/test_stitch_prediction.py +0 -0
  172. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/test_conftest.py +0 -0
  173. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/test_lightning_datamodule.py +0 -0
  174. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/test_lightning_module.py +0 -0
  175. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/test_lightning_prediction_datamodule.py +0 -0
  176. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/transforms/test_manipulate_n2v.py +0 -0
  177. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/transforms/test_normalize.py +0 -0
  178. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/transforms/test_pixel_manipulation.py +0 -0
  179. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/transforms/test_supported_transforms.py +0 -0
  180. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/utils/test_base_enum.py +0 -0
  181. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/utils/test_context.py +0 -0
  182. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/utils/test_logging.py +0 -0
  183. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/utils/test_metrics.py +0 -0
  184. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/utils/test_torch_utils.py +0 -0
  185. {careamics-0.1.0rc5 → careamics-0.1.0rc6}/tests/utils/test_wandb.py +0 -0
--- careamics-0.1.0rc5/.github/workflows/ci.yml
+++ careamics-0.1.0rc6/.github/workflows/ci.yml
@@ -27,7 +27,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
        # https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories
        platform: [ubuntu-latest, macos-13, windows-latest]

--- careamics-0.1.0rc5/.pre-commit-config.yaml
+++ careamics-0.1.0rc6/.pre-commit-config.yaml
@@ -9,12 +9,12 @@ ci:

 repos:
   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.16
+    rev: v0.18
     hooks:
       - id: validate-pyproject

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.4.3
+    rev: v0.4.7
     hooks:
       - id: ruff
         args: [--fix, --target-version, py38]
@@ -40,14 +40,14 @@ repos:
     hooks:
       - id: numpydoc-validation

-  # jupyter linting and formatting
-  - repo: https://github.com/nbQA-dev/nbQA
-    rev: 1.8.5
-    hooks:
-      - id: nbqa-ruff
-        args: [--fix]
-      - id: nbqa-black
-      #- id: nbqa-mypy
+  # # jupyter linting and formatting
+  # - repo: https://github.com/nbQA-dev/nbQA
+  #   rev: 1.8.5
+  #   hooks:
+  #     - id: nbqa-ruff
+  #       args: [--fix]
+  #     - id: nbqa-black
+  #     #- id: nbqa-mypy

   # strip out jupyter notebooks
   - repo: https://github.com/kynan/nbstripout
--- careamics-0.1.0rc5/PKG-INFO
+++ careamics-0.1.0rc6/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: careamics
-Version: 0.1.0rc5
+Version: 0.1.0rc6
 Summary: Toolbox for running N2V and friends.
 Project-URL: homepage, https://careamics.github.io/
 Project-URL: repository, https://github.com/CAREamics/careamics
@@ -14,6 +14,7 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Typing :: Typed
 Requires-Python: >=3.8
 Requires-Dist: bioimageio-core>=0.6.0
--- careamics-0.1.0rc5/examples/2D/n2v/example_BSD68_lightning.ipynb
+++ careamics-0.1.0rc6/examples/2D/n2v/example_BSD68_lightning.ipynb
@@ -18,11 +18,7 @@
     "    CAREamicsPredictDataModule,\n",
     "    CAREamicsTrainDataModule,\n",
     ")\n",
-<<<<<<< Updated upstream
     "from careamics.lightning_prediction import CAREamicsPredictionLoop\n",
-=======
-    "from careamics.lightning_prediction import CAREamicsFiring\n",
->>>>>>> Stashed changes
     "from careamics.utils.metrics import psnr"
    ]
   },
@@ -172,11 +168,7 @@
     "train_data_module = CAREamicsTrainDataModule(\n",
     "    train_data=train_path,\n",
     "    val_data=val_path,\n",
-<<<<<<< Updated upstream
     "    data_type=\"tiff\", # to use np.ndarray, set data_type to \"array\"\n",
-=======
-    "    data_type=\"tiff\",\n",
->>>>>>> Stashed changes
     "    patch_size=(64, 64),\n",
     "    axes=\"SYX\",\n",
     "    batch_size=128,\n",
--- careamics-0.1.0rc5/pyproject.toml
+++ careamics-0.1.0rc6/pyproject.toml
@@ -33,6 +33,7 @@ classifiers = [
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
     "License :: OSI Approved :: BSD License",
     "Typing :: Typed",
 ]
@@ -73,7 +74,7 @@ repository = "https://github.com/CAREamics/careamics"
 line-length = 88
 target-version = "py38"
 src = ["src"]
-select = [
+lint.select = [
     "E", # style errors
     "W", # style warnings
     "F", # flakes
@@ -86,7 +87,7 @@ select = [
     "A001", # flake8-builtins
     "RUF", # ruff-specific rules
 ]
-ignore = [
+lint.ignore = [
     "D100", # Missing docstring in public module
     "D107", # Missing docstring in __init__
     "D203", # 1 blank line required before class docstring
@@ -103,13 +104,12 @@ ignore = [
     "UP006", # Replace typing.List by list, mandatory for py3.8
     "UP007", # Replace Union by |, mandatory for py3.9
 ]
-ignore-init-module-imports = true
 show-fixes = true

-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
 convention = "numpy"

-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
 "tests/*.py" = ["D", "S"]
 "setup.py" = ["D"]

--- careamics-0.1.0rc5/src/careamics/callbacks/hyperparameters_callback.py
+++ careamics-0.1.0rc6/src/careamics/callbacks/hyperparameters_callback.py
@@ -1,3 +1,5 @@
+"""Callback saving CAREamics configuration as hyperparameters in the model."""
+
 from pytorch_lightning import LightningModule, Trainer
 from pytorch_lightning.callbacks import Callback

@@ -11,13 +13,18 @@ class HyperParametersCallback(Callback):
     This allows saving the configuration as dictionnary in the checkpoints, and
     loading it subsequently in a CAREamist instance.

+    Parameters
+    ----------
+    config : Configuration
+        CAREamics configuration to be saved as hyperparameter in the model.
+
     Attributes
     ----------
     config : Configuration
         CAREamics configuration to be saved as hyperparameter in the model.
     """

-    def __init__(self, config: Configuration):
+    def __init__(self, config: Configuration) -> None:
         """
         Constructor.

@@ -28,14 +35,14 @@ class HyperParametersCallback(Callback):
         """
         self.config = config

-    def on_train_start(self, trainer: Trainer, pl_module: LightningModule):
+    def on_train_start(self, trainer: Trainer, pl_module: LightningModule) -> None:
         """
         Update the hyperparameters of the model with the configuration on train start.

         Parameters
         ----------
         trainer : Trainer
-            PyTorch Lightning trainer.
+            PyTorch Lightning trainer, unused.
         pl_module : LightningModule
             PyTorch Lightning module.
         """
--- careamics-0.1.0rc5/src/careamics/callbacks/progress_bar_callback.py
+++ careamics-0.1.0rc6/src/careamics/callbacks/progress_bar_callback.py
@@ -1,3 +1,5 @@
+"""Progressbar callback."""
+
 import sys
 from typing import Dict, Union

@@ -10,7 +12,13 @@ class ProgressBarCallback(TQDMProgressBar):
     """Progress bar for training and validation steps."""

     def init_train_tqdm(self) -> tqdm:
-        """Override this to customize the tqdm bar for training."""
+        """Override this to customize the tqdm bar for training.
+
+        Returns
+        -------
+        tqdm
+            A tqdm bar.
+        """
         bar = tqdm(
             desc="Training",
             position=(2 * self.process_position),
@@ -23,7 +31,13 @@ class ProgressBarCallback(TQDMProgressBar):
         return bar

     def init_validation_tqdm(self) -> tqdm:
-        """Override this to customize the tqdm bar for validation."""
+        """Override this to customize the tqdm bar for validation.
+
+        Returns
+        -------
+        tqdm
+            A tqdm bar.
+        """
         # The main progress bar doesn't exist in `trainer.validate()`
         has_main_bar = self.train_progress_bar is not None
         bar = tqdm(
@@ -37,7 +51,13 @@ class ProgressBarCallback(TQDMProgressBar):
         return bar

     def init_test_tqdm(self) -> tqdm:
-        """Override this to customize the tqdm bar for testing."""
+        """Override this to customize the tqdm bar for testing.
+
+        Returns
+        -------
+        tqdm
+            A tqdm bar.
+        """
         bar = tqdm(
             desc="Testing",
             position=(2 * self.process_position),
@@ -52,6 +72,19 @@ class ProgressBarCallback(TQDMProgressBar):
     def get_metrics(
         self, trainer: Trainer, pl_module: LightningModule
     ) -> Dict[str, Union[int, str, float, Dict[str, float]]]:
-        """Override this to customize the metrics displayed in the progress bar."""
+        """Override this to customize the metrics displayed in the progress bar.
+
+        Parameters
+        ----------
+        trainer : Trainer
+            The trainer object.
+        pl_module : LightningModule
+            The LightningModule object, unused.
+
+        Returns
+        -------
+        dict
+            A dictionary with the metrics to display in the progress bar.
+        """
         pbar_metrics = trainer.progress_bar_metrics
         return {**pbar_metrics}
--- careamics-0.1.0rc5/src/careamics/careamist.py
+++ careamics-0.1.0rc6/src/careamics/careamist.py
@@ -18,13 +18,14 @@ from careamics.config import (
     create_inference_configuration,
     load_configuration,
 )
-from careamics.config.inference_model import TRANSFORMS_UNION
 from careamics.config.support import SupportedAlgorithm, SupportedData, SupportedLogger
+from careamics.dataset.dataset_utils import reshape_array
 from careamics.lightning_datamodule import CAREamicsTrainData
 from careamics.lightning_module import CAREamicsModule
 from careamics.lightning_prediction_datamodule import CAREamicsPredictData
 from careamics.lightning_prediction_loop import CAREamicsPredictionLoop
 from careamics.model_io import export_to_bmz, load_pretrained
+from careamics.transforms import Denormalize
 from careamics.utils import check_path_exists, get_logger

 from .callbacks import HyperParametersCallback
@@ -488,7 +489,6 @@ class CAREamist:
         tile_overlap: Tuple[int, ...] = (48, 48),
         axes: Optional[str] = None,
         data_type: Optional[Literal["tiff", "custom"]] = None,
-        transforms: Optional[List[TRANSFORMS_UNION]] = None,
         tta_transforms: bool = True,
         dataloader_params: Optional[Dict] = None,
         read_source_func: Optional[Callable] = None,
@@ -506,7 +506,6 @@ class CAREamist:
         tile_overlap: Tuple[int, ...] = (48, 48),
         axes: Optional[str] = None,
         data_type: Optional[Literal["array"]] = None,
-        transforms: Optional[List[TRANSFORMS_UNION]] = None,
         tta_transforms: bool = True,
         dataloader_params: Optional[Dict] = None,
         checkpoint: Optional[Literal["best", "last"]] = None,
@@ -521,7 +520,6 @@ class CAREamist:
         tile_overlap: Tuple[int, ...] = (48, 48),
         axes: Optional[str] = None,
         data_type: Optional[Literal["array", "tiff", "custom"]] = None,
-        transforms: Optional[List[TRANSFORMS_UNION]] = None,
         tta_transforms: bool = True,
         dataloader_params: Optional[Dict] = None,
         read_source_func: Optional[Callable] = None,
@@ -538,8 +536,6 @@ class CAREamist:
        configuration parameters will be used, with the `patch_size` instead of
        `tile_size`.

-        The default transforms are defined in the `InferenceModel` Pydantic model.
-
        Test-time augmentation (TTA) can be switched off using the `tta_transforms`
        parameter.

@@ -563,8 +559,6 @@ class CAREamist:
            Axes of the input data, by default None.
        data_type : Optional[Literal["array", "tiff", "custom"]], optional
            Type of the input data, by default None.
-        transforms : Optional[List[TRANSFORMS_UNION]], optional
-            List of transforms to apply to the data, by default None.
        tta_transforms : bool, optional
            Whether to apply test-time augmentation, by default True.
        dataloader_params : Optional[Dict], optional
@@ -608,7 +602,6 @@ class CAREamist:
            tile_overlap=tile_overlap,
            data_type=data_type,
            axes=axes,
-            transforms=transforms,
            tta_transforms=tta_transforms,
            batch_size=batch_size,
        )
@@ -660,38 +653,41 @@ class CAREamist:
                f"np.ndarray (got {type(source)})."
            )

-    def export_to_bmz(
+    def _create_data_for_bmz(
         self,
-        path: Union[Path, str],
-        name: str,
-        authors: List[dict],
         input_array: Optional[np.ndarray] = None,
-        general_description: str = "",
-        channel_names: Optional[List[str]] = None,
-        data_description: Optional[str] = None,
-    ) -> None:
-        """Export the model to the BioImage Model Zoo format.
+    ) -> np.ndarray:
+        """Create data for BMZ export.

-        Input array must be of shape SC(Z)YX, with S and C singleton dimensions.
+        If no `input_array` is provided, this method checks if there is a prediction
+        datamodule, or a training data module, to extract a patch. If none exists,
+        then a random aray is created.
+
+        If there is a non-singleton batch dimension, this method returns only the first
+        element.

         Parameters
         ----------
-        path : Union[Path, str]
-            Path to save the model.
-        name : str
-            Name of the model.
-        authors : List[dict]
-            List of authors of the model.
         input_array : Optional[np.ndarray], optional
-            Input array for the model, must be of shape SC(Z)YX, by default None.
-        general_description : str
-            General description of the model, used in the metadata of the BMZ archive.
-        channel_names : Optional[List[str]], optional
-            Channel names, by default None.
-        data_description : Optional[str], optional
-            Description of the data, by default None.
+            Input array, by default None.
+
+        Returns
+        -------
+        np.ndarray
+            Input data for BMZ export.
+
+        Raises
+        ------
+        ValueError
+            If mean and std are not provided in the configuration.
         """
         if input_array is None:
+            if self.cfg.data_config.mean is None or self.cfg.data_config.std is None:
+                raise ValueError(
+                    "Mean and std cannot be None in the configuration in order to"
+                    "export to the BMZ format. Was the model trained?"
+                )
+
             # generate images, priority is given to the prediction data module
             if self.pred_datamodule is not None:
                 # unpack a batch, ignore masks or targets
@@ -699,19 +695,23 @@ class CAREamist:

                 # convert torch.Tensor to numpy
                 input_patch = input_patch.numpy()
+
+                # denormalize
+                denormalize = Denormalize(
+                    mean=self.cfg.data_config.mean, std=self.cfg.data_config.std
+                )
+                input_patch, _ = denormalize(input_patch)
+
             elif self.train_datamodule is not None:
                 input_patch, *_ = next(iter(self.train_datamodule.train_dataloader()))
                 input_patch = input_patch.numpy()
-            else:
-                if (
-                    self.cfg.data_config.mean is None
-                    or self.cfg.data_config.std is None
-                ):
-                    raise ValueError(
-                        "Mean and std cannot be None in the configuration in order to"
-                        "export to the BMZ format. Was the model trained?"
-                    )

+                # denormalize
+                denormalize = Denormalize(
+                    mean=self.cfg.data_config.mean, std=self.cfg.data_config.std
+                )
+                input_patch, _ = denormalize(input_patch)
+            else:
                 # create a random input array
                 input_patch = np.random.normal(
                     loc=self.cfg.data_config.mean,
@@ -721,11 +721,47 @@ class CAREamist:
                    np.newaxis, np.newaxis, ...
                ]  # add S & C dimensions
        else:
-            input_patch = input_array
+            # potentially correct shape
+            input_patch = reshape_array(input_array, self.cfg.data_config.axes)

-        # if there is a batch dimension
+        # if this a batch
        if input_patch.shape[0] > 1:
-            input_patch = input_patch[0:1, ...]  # keep singleton dim
+            input_patch = input_patch[[0], ...]  # keep singleton dim
+
+        return input_patch
+
+    def export_to_bmz(
+        self,
+        path: Union[Path, str],
+        name: str,
+        authors: List[dict],
+        input_array: Optional[np.ndarray] = None,
+        general_description: str = "",
+        channel_names: Optional[List[str]] = None,
+        data_description: Optional[str] = None,
+    ) -> None:
+        """Export the model to the BioImage Model Zoo format.
+
+        Input array must be of shape SC(Z)YX, with S and C singleton dimensions.
+
+        Parameters
+        ----------
+        path : Union[Path, str]
+            Path to save the model.
+        name : str
+            Name of the model.
+        authors : List[dict]
+            List of authors of the model.
+        input_array : Optional[np.ndarray], optional
+            Input array for the model, must be of shape SC(Z)YX, by default None.
+        general_description : str
+            General description of the model, used in the metadata of the BMZ archive.
+        channel_names : Optional[List[str]], optional
+            Channel names, by default None.
+        data_description : Optional[str], optional
+            Description of the data, by default None.
+        """
+        input_patch = self._create_data_for_bmz(input_array)

        # axes need to be reformated for the export because reshaping was done in the
        # datamodule
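
For readers of the careamist.py hunks above: the new `_create_data_for_bmz` helper now de-normalizes the sample patch before it is handed to `export_to_bmz`. The following is a minimal sketch of that de-normalization pattern only, using the `Denormalize` import and the tuple-returning call shown in the diff; the mean/std values and the random patch are placeholders, not values from the release.

# Sketch only: mirrors the denormalization pattern from the careamist.py hunks.
# The statistics and the patch below are made-up placeholders.
import numpy as np

from careamics.transforms import Denormalize

mean, std = 128.0, 32.0  # in CAREamics these would come from cfg.data_config
patch = np.random.normal(0.0, 1.0, size=(1, 1, 64, 64)).astype(np.float32)

denormalize = Denormalize(mean=mean, std=std)
denormalized_patch, _ = denormalize(patch)  # same call pattern as in the diff

print(denormalized_patch.shape)  # (1, 1, 64, 64): SCYX with singleton S and C

Both the prediction- and training-datamodule branches apply this step, so the sample stored in the BMZ archive is back in the raw data range rather than the normalized one.
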
--- careamics-0.1.0rc5/src/careamics/config/algorithm_model.py
+++ careamics-0.1.0rc6/src/careamics/config/algorithm_model.py
@@ -1,3 +1,5 @@
+"""Algorithm configuration."""
+
 from __future__ import annotations

 from pprint import pformat
@@ -17,9 +19,9 @@ class AlgorithmConfig(BaseModel):
     training algorithm: which algorithm, loss function, model architecture, optimizer,
     and learning rate scheduler to use.

-    Currently, we only support N2V and custom algorithms. The `n2v` algorithm is only
-    compatible with `n2v` loss and `UNet` architecture. The `custom` algorithm allows
-    you to register your own architecture and select it using its name as
+    Currently, we only support N2V, CARE, N2N and custom models. The `n2v` algorithm is
+    only compatible with `n2v` loss and `UNet` architecture. The `custom` algorithm
+    allows you to register your own architecture and select it using its name as
     `name` in the custom pydantic model.

     Attributes
--- careamics-0.1.0rc5/src/careamics/config/architectures/architecture_model.py
+++ careamics-0.1.0rc6/src/careamics/config/architectures/architecture_model.py
@@ -1,3 +1,5 @@
+"""Base model for the various CAREamics architectures."""
+
 from typing import Any, Dict

 from pydantic import BaseModel
@@ -16,6 +18,11 @@ class ArchitectureModel(BaseModel):
         """
         Dump the model as a dictionary, ignoring the architecture keyword.

+        Parameters
+        ----------
+        **kwargs : Any
+            Additional keyword arguments from Pydantic BaseModel model_dump method.
+
         Returns
         -------
         dict[str, Any]
--- careamics-0.1.0rc5/src/careamics/config/architectures/custom_model.py
+++ careamics-0.1.0rc6/src/careamics/config/architectures/custom_model.py
@@ -1,3 +1,5 @@
+"""Custom architecture Pydantic model."""
+
 from __future__ import annotations

 from pprint import pformat
@@ -84,6 +86,11 @@ class CustomModel(ArchitectureModel):
         value : str
             Name of the custom model as registered using the `@register_model`
             decorator.
+
+        Returns
+        -------
+        str
+            The custom model name.
         """
         # delegate error to get_custom_model
         model = get_custom_model(value)
@@ -134,7 +141,7 @@ class CustomModel(ArchitectureModel):

         Parameters
         ----------
-        kwargs : Any
+        **kwargs : Any
             Additional keyword arguments from Pydantic BaseModel model_dump method.

         Returns
--- careamics-0.1.0rc5/src/careamics/config/architectures/register_model.py
+++ careamics-0.1.0rc6/src/careamics/config/architectures/register_model.py
@@ -1,3 +1,5 @@
+"""Custom model registration utilities."""
+
 from typing import Callable

 from torch.nn import Module
@@ -53,7 +55,7 @@ def register_model(name: str) -> Callable:
        Parameters
        ----------
        model : Module
-            Module class to register
+            Module class to register.

        Returns
        -------
--- careamics-0.1.0rc5/src/careamics/config/architectures/unet_model.py
+++ careamics-0.1.0rc6/src/careamics/config/architectures/unet_model.py
@@ -1,3 +1,5 @@
+"""UNet Pydantic model."""
+
 from __future__ import annotations

 from typing import Literal
--- careamics-0.1.0rc5/src/careamics/config/architectures/vae_model.py
+++ careamics-0.1.0rc6/src/careamics/config/architectures/vae_model.py
@@ -1,3 +1,5 @@
+"""VAE Pydantic model."""
+
 from typing import Literal

 from pydantic import (
--- careamics-0.1.0rc5/src/careamics/config/callback_model.py
+++ careamics-0.1.0rc6/src/careamics/config/callback_model.py
@@ -1,4 +1,4 @@
-"""Checkpoint saving configuration."""
+"""Callback Pydantic models."""

 from __future__ import annotations

@@ -13,13 +13,7 @@ from pydantic import (


 class CheckpointModel(BaseModel):
-    """_summary_.
-
-    Parameters
-    ----------
-    BaseModel : _type_
-        _description_
-    """
+    """Checkpoint saving callback Pydantic model."""

     model_config = ConfigDict(
         validate_assignment=True,
@@ -46,13 +40,7 @@ class CheckpointModel(BaseModel):


 class EarlyStoppingModel(BaseModel):
-    """_summary_.
-
-    Parameters
-    ----------
-    BaseModel : _type_
-        _description_
-    """
+    """Early stopping callback Pydantic model."""

     model_config = ConfigDict(
         validate_assignment=True,
--- careamics-0.1.0rc5/src/careamics/config/configuration_example.py
+++ careamics-0.1.0rc6/src/careamics/config/configuration_example.py
@@ -1,3 +1,5 @@
+"""Example of configurations."""
+
 from .algorithm_model import AlgorithmConfig
 from .architectures import UNetModel
 from .configuration_model import Configuration
@@ -19,7 +21,7 @@ from .training_model import TrainingConfig


 def full_configuration_example() -> Configuration:
-    """Returns a dictionnary representing a full configuration example.
+    """Return a dictionnary representing a full configuration example.

     Returns
     -------
@@ -56,7 +58,7 @@ def full_configuration_example() -> Configuration:
                 "name": SupportedTransform.NORMALIZE.value,
             },
             {
-                "name": SupportedTransform.NDFLIP.value,
+                "name": SupportedTransform.XY_FLIP.value,
             },
             {
                 "name": SupportedTransform.XY_RANDOM_ROTATE90.value,