careamics 0.0.5__py3-none-any.whl → 0.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of careamics might be problematic.

Files changed (98)
  1. careamics/__init__.py +17 -2
  2. careamics/careamist.py +4 -3
  3. careamics/cli/conf.py +1 -2
  4. careamics/cli/main.py +1 -2
  5. careamics/cli/utils.py +3 -3
  6. careamics/config/__init__.py +47 -25
  7. careamics/config/algorithms/__init__.py +15 -0
  8. careamics/config/algorithms/care_algorithm_model.py +50 -0
  9. careamics/config/algorithms/n2n_algorithm_model.py +42 -0
  10. careamics/config/algorithms/n2v_algorithm_model.py +35 -0
  11. careamics/config/algorithms/unet_algorithm_model.py +88 -0
  12. careamics/config/{vae_algorithm_model.py → algorithms/vae_algorithm_model.py} +14 -12
  13. careamics/config/architectures/__init__.py +1 -11
  14. careamics/config/architectures/architecture_model.py +3 -3
  15. careamics/config/architectures/lvae_model.py +6 -1
  16. careamics/config/architectures/unet_model.py +1 -0
  17. careamics/config/care_configuration.py +100 -0
  18. careamics/config/configuration.py +354 -0
  19. careamics/config/{configuration_factory.py → configuration_factories.py} +103 -36
  20. careamics/config/configuration_io.py +85 -0
  21. careamics/config/data/__init__.py +10 -0
  22. careamics/config/{data_model.py → data/data_model.py} +58 -198
  23. careamics/config/data/n2v_data_model.py +193 -0
  24. careamics/config/likelihood_model.py +1 -2
  25. careamics/config/n2n_configuration.py +101 -0
  26. careamics/config/n2v_configuration.py +266 -0
  27. careamics/config/nm_model.py +1 -2
  28. careamics/config/support/__init__.py +7 -7
  29. careamics/config/support/supported_algorithms.py +0 -3
  30. careamics/config/support/supported_architectures.py +0 -4
  31. careamics/config/transformations/__init__.py +10 -4
  32. careamics/config/transformations/transform_model.py +3 -3
  33. careamics/config/transformations/transform_unions.py +42 -0
  34. careamics/config/validators/validator_utils.py +3 -3
  35. careamics/dataset/__init__.py +2 -2
  36. careamics/dataset/dataset_utils/__init__.py +3 -3
  37. careamics/dataset/dataset_utils/dataset_utils.py +4 -6
  38. careamics/dataset/dataset_utils/file_utils.py +9 -9
  39. careamics/dataset/dataset_utils/iterate_over_files.py +4 -3
  40. careamics/dataset/in_memory_dataset.py +11 -12
  41. careamics/dataset/iterable_dataset.py +4 -4
  42. careamics/dataset/iterable_pred_dataset.py +2 -1
  43. careamics/dataset/iterable_tiled_pred_dataset.py +2 -1
  44. careamics/dataset/patching/random_patching.py +11 -10
  45. careamics/dataset/patching/sequential_patching.py +26 -26
  46. careamics/dataset/patching/validate_patch_dimension.py +3 -3
  47. careamics/dataset/tiling/__init__.py +2 -2
  48. careamics/dataset/tiling/collate_tiles.py +3 -3
  49. careamics/dataset/tiling/lvae_tiled_patching.py +2 -1
  50. careamics/dataset/tiling/tiled_patching.py +11 -10
  51. careamics/file_io/__init__.py +5 -5
  52. careamics/file_io/read/__init__.py +1 -1
  53. careamics/file_io/read/get_func.py +2 -2
  54. careamics/file_io/write/__init__.py +2 -2
  55. careamics/lightning/__init__.py +5 -5
  56. careamics/lightning/callbacks/__init__.py +1 -1
  57. careamics/lightning/callbacks/prediction_writer_callback/__init__.py +3 -3
  58. careamics/lightning/callbacks/prediction_writer_callback/prediction_writer_callback.py +2 -1
  59. careamics/lightning/callbacks/prediction_writer_callback/write_strategy.py +2 -1
  60. careamics/lightning/callbacks/progress_bar_callback.py +2 -2
  61. careamics/lightning/lightning_module.py +11 -7
  62. careamics/lightning/train_data_module.py +26 -26
  63. careamics/losses/__init__.py +3 -3
  64. careamics/model_io/__init__.py +1 -1
  65. careamics/model_io/bioimage/__init__.py +1 -1
  66. careamics/model_io/bioimage/_readme_factory.py +1 -1
  67. careamics/model_io/bioimage/model_description.py +17 -17
  68. careamics/model_io/bmz_io.py +6 -17
  69. careamics/model_io/model_io_utils.py +9 -9
  70. careamics/models/layers.py +16 -16
  71. careamics/models/lvae/lvae.py +0 -3
  72. careamics/models/model_factory.py +2 -15
  73. careamics/models/unet.py +8 -8
  74. careamics/prediction_utils/__init__.py +1 -1
  75. careamics/prediction_utils/prediction_outputs.py +15 -15
  76. careamics/prediction_utils/stitch_prediction.py +6 -6
  77. careamics/transforms/__init__.py +5 -5
  78. careamics/transforms/compose.py +13 -13
  79. careamics/transforms/n2v_manipulate.py +3 -3
  80. careamics/transforms/pixel_manipulation.py +9 -9
  81. careamics/transforms/xy_random_rotate90.py +4 -4
  82. careamics/utils/__init__.py +5 -5
  83. careamics/utils/context.py +2 -1
  84. careamics/utils/logging.py +11 -10
  85. careamics/utils/torch_utils.py +7 -7
  86. {careamics-0.0.5.dist-info → careamics-0.0.6.dist-info}/METADATA +11 -11
  87. {careamics-0.0.5.dist-info → careamics-0.0.6.dist-info}/RECORD +90 -85
  88. careamics/config/architectures/custom_model.py +0 -162
  89. careamics/config/architectures/register_model.py +0 -103
  90. careamics/config/configuration_model.py +0 -603
  91. careamics/config/fcn_algorithm_model.py +0 -152
  92. careamics/config/references/__init__.py +0 -45
  93. careamics/config/references/algorithm_descriptions.py +0 -132
  94. careamics/config/references/references.py +0 -39
  95. careamics/config/transformations/transform_union.py +0 -20
  96. {careamics-0.0.5.dist-info → careamics-0.0.6.dist-info}/WHEEL +0 -0
  97. {careamics-0.0.5.dist-info → careamics-0.0.6.dist-info}/entry_points.txt +0 -0
  98. {careamics-0.0.5.dist-info → careamics-0.0.6.dist-info}/licenses/LICENSE +0 -0
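
Most of the source changes below apply one recurring pattern: the deprecated `typing` aliases (`List`, `Tuple`, `Dict`) are replaced by the built-in generics standardized in PEP 585, and `Iterator`/`Generator` imports move from `typing` to `collections.abc`. This is valid at runtime because the package already requires Python >=3.9 (see the METADATA diff at the end). A minimal sketch of the pattern, using a hypothetical function rather than code from the package:

# Before: typing aliases, deprecated since Python 3.9.
from typing import Dict, List

def count_labels_old(labels: List[str]) -> Dict[str, int]:
    counts: Dict[str, int] = {}
    for label in labels:
        counts[label] = counts.get(label, 0) + 1
    return counts

# After: built-in generics (PEP 585) and collections.abc for abstract types.
from collections.abc import Iterable

def count_labels(labels: Iterable[str]) -> dict[str, int]:
    counts: dict[str, int] = {}
    for label in labels:
        counts[label] = counts.get(label, 0) + 1
    return counts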
careamics/prediction_utils/prediction_outputs.py

@@ -1,6 +1,6 @@
 """Module containing functions to convert prediction outputs to desired form."""
 
-from typing import Any, List, Literal, Tuple, Union, overload
+from typing import Any, Literal, Union, overload
 
 import numpy as np
 from numpy.typing import NDArray
@@ -9,7 +9,7 @@ from ..config.tile_information import TileInformation
 from .stitch_prediction import stitch_prediction
 
 
-def convert_outputs(predictions: List[Any], tiled: bool) -> list[NDArray]:
+def convert_outputs(predictions: list[Any], tiled: bool) -> list[NDArray]:
     """
     Convert the Lightning trainer outputs to the desired form.
 
@@ -25,7 +25,7 @@ def convert_outputs(predictions: List[Any], tiled: bool) -> list[NDArray]:
     Returns
     -------
     list of numpy.ndarray or numpy.ndarray
-        List of arrays with the axes SC(Z)YX. If there is only 1 output it will not
+        list of arrays with the axes SC(Z)YX. If there is only 1 output it will not
         be in a list.
     """
     if len(predictions) == 0:
@@ -44,27 +44,27 @@ def convert_outputs(predictions: List[Any], tiled: bool) -> list[NDArray]:
 # for mypy
 @overload
 def combine_batches(  # numpydoc ignore=GL08
-    predictions: List[Any], tiled: Literal[True]
-) -> Tuple[List[NDArray], List[TileInformation]]: ...
+    predictions: list[Any], tiled: Literal[True]
+) -> tuple[list[NDArray], list[TileInformation]]: ...
 
 
 # for mypy
 @overload
 def combine_batches(  # numpydoc ignore=GL08
-    predictions: List[Any], tiled: Literal[False]
-) -> List[NDArray]: ...
+    predictions: list[Any], tiled: Literal[False]
+) -> list[NDArray]: ...
 
 
 # for mypy
 @overload
 def combine_batches(  # numpydoc ignore=GL08
-    predictions: List[Any], tiled: Union[bool, Literal[True], Literal[False]]
-) -> Union[List[NDArray], Tuple[List[NDArray], List[TileInformation]]]: ...
+    predictions: list[Any], tiled: Union[bool, Literal[True], Literal[False]]
+) -> Union[list[NDArray], tuple[list[NDArray], list[TileInformation]]]: ...
 
 
 def combine_batches(
-    predictions: List[Any], tiled: bool
-) -> Union[List[NDArray], Tuple[List[NDArray], List[TileInformation]]]:
+    predictions: list[Any], tiled: bool
+) -> Union[list[NDArray], tuple[list[NDArray], list[TileInformation]]]:
     """
     If predictions are in batches, they will be combined.
 
@@ -87,8 +87,8 @@ def combine_batches(
 
 
 def _combine_tiled_batches(
-    predictions: List[Tuple[NDArray, List[TileInformation]]]
-) -> Tuple[List[NDArray], List[TileInformation]]:
+    predictions: list[tuple[NDArray, list[TileInformation]]]
+) -> tuple[list[NDArray], list[TileInformation]]:
     """
     Combine batches from tiled output.
 
@@ -109,13 +109,13 @@
     tile_infos = [
         tile_info for _, tile_info_list in predictions for tile_info in tile_info_list
     ]
-    prediction_tiles: List[NDArray] = _combine_array_batches(
+    prediction_tiles: list[NDArray] = _combine_array_batches(
         [preds for preds, _ in predictions]
     )
     return prediction_tiles, tile_infos
 
 
-def _combine_array_batches(predictions: List[NDArray]) -> List[NDArray]:
+def _combine_array_batches(predictions: list[NDArray]) -> list[NDArray]:
     """
     Combine batches of arrays.
 
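
The combine_batches hunks above only touch the annotations; the pre-existing @overload pattern is kept, in which Literal[True] and Literal[False] values of the tiled flag give the type checker a precise return type while a single implementation runs at runtime. A minimal, self-contained sketch of that pattern, with hypothetical names rather than the package's code:

from typing import Any, Literal, Union, overload


@overload
def combine(items: list[Any], tiled: Literal[True]) -> tuple[list[int], list[str]]: ...
@overload
def combine(items: list[Any], tiled: Literal[False]) -> list[int]: ...
def combine(
    items: list[Any], tiled: bool
) -> Union[list[int], tuple[list[int], list[str]]]:
    # Only this implementation exists at runtime; the overloads above let a
    # type checker narrow the return type from the literal value of `tiled`.
    if tiled:
        return [len(items)], ["tile info"]
    return [len(items)]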

careamics/prediction_utils/stitch_prediction.py

@@ -1,7 +1,7 @@
 """Prediction utility functions."""
 
 import builtins
-from typing import List, Union
+from typing import Union
 
 import numpy as np
 from numpy.typing import NDArray
@@ -11,9 +11,9 @@ from careamics.config.tile_information import TileInformation
 
 # TODO: why not allow input and output of torch.tensor ?
 def stitch_prediction(
-    tiles: List[np.ndarray],
-    tile_infos: List[TileInformation],
-) -> List[np.ndarray]:
+    tiles: list[np.ndarray],
+    tile_infos: list[TileInformation],
+) -> list[np.ndarray]:
     """
     Stitch tiles back together to form a full image(s).
 
@@ -54,8 +54,8 @@
 
 
 def stitch_prediction_single(
-    tiles: List[NDArray],
-    tile_infos: List[TileInformation],
+    tiles: list[NDArray],
+    tile_infos: list[TileInformation],
 ) -> NDArray:
     """
     Stitch tiles back together to form a full image.

careamics/transforms/__init__.py

@@ -1,14 +1,14 @@
 """Transforms that are used to augment the data."""
 
 __all__ = [
-    "get_all_transforms",
+    "Compose",
+    "Denormalize",
+    "ImageRestorationTTA",
     "N2VManipulate",
+    "Normalize",
     "XYFlip",
     "XYRandomRotate90",
-    "ImageRestorationTTA",
-    "Denormalize",
-    "Normalize",
-    "Compose",
+    "get_all_transforms",
 ]
 
 

careamics/transforms/compose.py

@@ -1,10 +1,10 @@
 """A class chaining transforms together."""
 
-from typing import Dict, List, Optional, Tuple, Union, cast
+from typing import Optional, Union, cast
 
 from numpy.typing import NDArray
 
-from careamics.config.transformations import TransformModel
+from careamics.config.transformations import NORM_AND_SPATIAL_UNION
 
 from .n2v_manipulate import N2VManipulate
 from .normalize import Normalize
@@ -20,7 +20,7 @@ ALL_TRANSFORMS = {
 }
 
 
-def get_all_transforms() -> Dict[str, type]:
+def get_all_transforms() -> dict[str, type]:
     """Return all the transforms accepted by CAREamics.
 
     Returns
@@ -37,7 +37,7 @@ class Compose:
 
     Parameters
     ----------
-    transform_list : List[TransformModel]
+    transform_list : list[TransformModel]
         A list of dictionaries where each dictionary contains the name of a
         transform and its parameters.
 
@@ -47,12 +47,12 @@
         A callable that applies the transforms to the input data.
     """
 
-    def __init__(self, transform_list: List[TransformModel]) -> None:
+    def __init__(self, transform_list: list[NORM_AND_SPATIAL_UNION]) -> None:
         """Instantiate a Compose object.
 
         Parameters
         ----------
-        transform_list : List[TransformModel]
+        transform_list : list[NORM_AND_SPATIAL_UNION]
            A list of dictionaries where each dictionary contains the name of a
            transform and its parameters.
         """
@@ -67,7 +67,7 @@ class Compose:
 
     def _chain_transforms(
         self, patch: NDArray, target: Optional[NDArray]
-    ) -> Tuple[Optional[NDArray], ...]:
+    ) -> tuple[Optional[NDArray], ...]:
         """Chain transforms on the input data.
 
         Parameters
@@ -79,7 +79,7 @@
 
         Returns
         -------
-        Tuple[np.ndarray, Optional[np.ndarray]]
+        tuple[np.ndarray, Optional[np.ndarray]]
            The output of the transformations.
         """
         params: Union[
@@ -103,7 +103,7 @@
         patch: NDArray,
         target: Optional[NDArray],
         **additional_arrays: NDArray,
-    ) -> Tuple[NDArray, Optional[NDArray], dict[str, NDArray]]:
+    ) -> tuple[NDArray, Optional[NDArray], dict[str, NDArray]]:
         """Chain transforms on the input data, with additional arrays.
 
         Parameters
@@ -118,7 +118,7 @@
 
         Returns
        -------
-        Tuple[np.ndarray, Optional[np.ndarray]]
+        tuple[np.ndarray, Optional[np.ndarray]]
            The output of the transformations.
         """
         params = {"patch": patch, "target": target, **additional_arrays}
@@ -131,7 +131,7 @@
 
     def __call__(
         self, patch: NDArray, target: Optional[NDArray] = None
-    ) -> Tuple[NDArray, ...]:
+    ) -> tuple[NDArray, ...]:
         """Apply the transforms to the input data.
 
         Parameters
@@ -143,11 +143,11 @@
 
         Returns
         -------
-        Tuple[np.ndarray, ...]
+        tuple[np.ndarray, ...]
            The output of the transformations.
         """
         # TODO: solve casting Compose.__call__ ouput
-        return cast(Tuple[NDArray, ...], self._chain_transforms(patch, target))
+        return cast(tuple[NDArray, ...], self._chain_transforms(patch, target))
 
     def transform_with_additional_arrays(
         self,

careamics/transforms/n2v_manipulate.py

@@ -1,6 +1,6 @@
 """N2V manipulation transform."""
 
-from typing import Any, Literal, Optional, Tuple
+from typing import Any, Literal, Optional
 
 import numpy as np
 from numpy.typing import NDArray
@@ -100,7 +100,7 @@ class N2VManipulate(Transform):
 
     def __call__(
         self, patch: NDArray, *args: Any, **kwargs: Any
-    ) -> Tuple[NDArray, NDArray, NDArray]:
+    ) -> tuple[NDArray, NDArray, NDArray]:
         """Apply the transform to the image.
 
         Parameters
@@ -114,7 +114,7 @@ class N2VManipulate(Transform):
 
         Returns
         -------
-        Tuple[np.ndarray, np.ndarray, np.ndarray]
+        tuple[np.ndarray, np.ndarray, np.ndarray]
            Masked patch, original patch, and mask.
         """
         masked = np.zeros_like(patch)

careamics/transforms/pixel_manipulation.py

@@ -5,7 +5,7 @@ Pixel manipulation is used in N2V and similar algorithm to replace the value of
 masked pixels.
 """
 
-from typing import Optional, Tuple
+from typing import Optional
 
 import numpy as np
 
@@ -107,7 +107,7 @@ def _odd_jitter_func(step: float, rng: np.random.Generator) -> np.ndarray:
 
 def _get_stratified_coords(
     mask_pixel_perc: float,
-    shape: Tuple[int, ...],
+    shape: tuple[int, ...],
     rng: Optional[np.random.Generator] = None,
 ) -> np.ndarray:
     """
@@ -121,7 +121,7 @@ def _get_stratified_coords(
     mask_pixel_perc : float
         Actual (quasi) percentage of masked pixels across the whole image. Used in
         calculating the distance between masked pixels across each axis.
-    shape : Tuple[int, ...]
+    shape : tuple[int, ...]
         Shape of the input patch.
     rng : np.random.Generator or None
         Random number generator.
@@ -242,7 +242,7 @@ def uniform_manipulate(
     remove_center: bool = True,
     struct_params: Optional[StructMaskParameters] = None,
     rng: Optional[np.random.Generator] = None,
-) -> Tuple[np.ndarray, np.ndarray]:
+) -> tuple[np.ndarray, np.ndarray]:
     """
     Manipulate pixels by replacing them with a neighbor values.
 
@@ -269,8 +269,8 @@
 
     Returns
     -------
-    Tuple[np.ndarray]
-        Tuple containing the manipulated patch and the corresponding mask.
+    tuple[np.ndarray]
+        tuple containing the manipulated patch and the corresponding mask.
     """
     if rng is None:
         rng = np.random.default_rng()
@@ -322,7 +322,7 @@ def median_manipulate(
     subpatch_size: int = 11,
     struct_params: Optional[StructMaskParameters] = None,
     rng: Optional[np.random.Generator] = None,
-) -> Tuple[np.ndarray, np.ndarray]:
+) -> tuple[np.ndarray, np.ndarray]:
     """
     Manipulate pixels by replacing them with the median of their surrounding subpatch.
 
@@ -348,8 +348,8 @@
 
     Returns
     -------
-    Tuple[np.ndarray]
-        Tuple containing the manipulated patch, the original patch and the mask.
+    tuple[np.ndarray]
+        tuple containing the manipulated patch, the original patch and the mask.
     """
     if rng is None:
         rng = np.random.default_rng()

careamics/transforms/xy_random_rotate90.py

@@ -1,6 +1,6 @@
 """Patch transform applying XY random 90 degrees rotations."""
 
-from typing import Optional, Tuple
+from typing import Optional
 
 import numpy as np
 from numpy.typing import NDArray
@@ -69,7 +69,7 @@ class XYRandomRotate90(Transform):
 
         Returns
         -------
-        Tuple[np.ndarray, Optional[np.ndarray]]
+        tuple[np.ndarray, Optional[np.ndarray]]
            Transformed patch and target.
         """
         if self.rng.random() > self.p:
@@ -90,7 +90,7 @@ class XYRandomRotate90(Transform):
 
         return patch_transformed, target_transformed, additional_transformed
 
-    def _apply(self, patch: NDArray, n_rot: int, axes: Tuple[int, int]) -> NDArray:
+    def _apply(self, patch: NDArray, n_rot: int, axes: tuple[int, int]) -> NDArray:
         """Apply the transform to the image.
 
         Parameters
@@ -99,7 +99,7 @@ class XYRandomRotate90(Transform):
            Image or image patch, 2D or 3D, shape C(Z)YX.
         n_rot : int
            Number of 90 degree rotations.
-        axes : Tuple[int, int]
+        axes : tuple[int, int]
            Axes along which to rotate the patch.
 
         Returns

careamics/utils/__init__.py

@@ -1,13 +1,13 @@
 """Utils module."""
 
 __all__ = [
-    "cwd",
-    "get_ram_size",
-    "check_path_exists",
     "BaseEnum",
-    "get_logger",
-    "get_careamics_home",
     "autocorrelation",
+    "check_path_exists",
+    "cwd",
+    "get_careamics_home",
+    "get_logger",
+    "get_ram_size",
 ]
 
 

careamics/utils/context.py

@@ -5,9 +5,10 @@ A convenience function to change the working directory in order to save data.
 """
 
 import os
+from collections.abc import Iterator
 from contextlib import contextmanager
 from pathlib import Path
-from typing import Iterator, Union
+from typing import Union
 
 
 def get_careamics_home() -> Path:

careamics/utils/logging.py

@@ -7,8 +7,9 @@ The methods are responsible for the in-console logger.
 import logging
 import sys
 import time
+from collections.abc import Generator
 from pathlib import Path
-from typing import Any, Dict, Generator, List, Optional, Union
+from typing import Any, Optional, Union
 
 LOGGERS: dict = {}
 
@@ -84,7 +85,7 @@ class ProgressBar:
         Zero-indexed current epoch, by default None.
     num_epochs : Optional[int], optional
         Total number of epochs, by default None.
-    stateful_metrics : Optional[List], optional
+    stateful_metrics : Optional[list], optional
         Iterable of string names of metrics that should *not* be averaged over time.
         Metrics in this list will be displayed as-is. All others will be averaged by
         the progress bar before display, by default None.
@@ -99,7 +100,7 @@ class ProgressBar:
         max_value: Optional[int] = None,
         epoch: Optional[int] = None,
         num_epochs: Optional[int] = None,
-        stateful_metrics: Optional[List] = None,
+        stateful_metrics: Optional[list] = None,
         always_stateful: bool = False,
         mode: str = "train",
     ) -> None:
@@ -114,7 +115,7 @@ class ProgressBar:
            Zero-indexed current epoch, by default None.
        num_epochs : Optional[int], optional
            Total number of epochs, by default None.
-       stateful_metrics : Optional[List], optional
+       stateful_metrics : Optional[list], optional
            Iterable of string names of metrics that should *not* be averaged over time.
            Metrics in this list will be displayed as-is. All others will be averaged by
            the progress bar before display, by default None.
@@ -145,8 +146,8 @@ class ProgressBar:
         self._seen_so_far = 0
         # We use a dict + list to avoid garbage collection
         # issues found in OrderedDict
-        self._values: Dict[Any, Any] = {}
-        self._values_order: List[Any] = []
+        self._values: dict[Any, Any] = {}
+        self._values_order: list[Any] = []
         self._start = time.time()
         self._last_update = 0.0
         self.spin = self.spinning_cursor() if self.max_value is None else None
@@ -158,7 +159,7 @@ class ProgressBar:
         self.message = "Denoising"
 
     def update(
-        self, current_step: int, batch_size: int = 1, values: Optional[List] = None
+        self, current_step: int, batch_size: int = 1, values: Optional[list] = None
     ) -> None:
         """
         Update the progress bar.
@@ -169,7 +170,7 @@ class ProgressBar:
            Index of the current step.
        batch_size : int, optional
            Batch size, by default 1.
-       values : Optional[List], optional
+       values : Optional[list], optional
            Updated metrics values, by default None.
        """
        values = values or []
@@ -263,7 +264,7 @@ class ProgressBar:
 
         self._last_update = now
 
-    def add(self, n: int, values: Optional[List] = None) -> None:
+    def add(self, n: int, values: Optional[list] = None) -> None:
         """
         Update the progress bar by n steps.
 
@@ -271,7 +272,7 @@ class ProgressBar:
        ----------
        n : int
            Number of steps to increase the progress bar with.
-       values : Optional[List], optional
+       values : Optional[list], optional
            Updated metrics values, by default None.
        """
        self.update(self._seen_so_far + n, 1, values=values)

careamics/utils/torch_utils.py

@@ -5,7 +5,7 @@ These functions are used to control certain aspects and behaviours of PyTorch.
 """
 
 import inspect
-from typing import Dict, Union
+from typing import Union
 
 import torch
 
@@ -27,12 +27,12 @@ def filter_parameters(
     ----------
     func : type
         Class object.
-    user_params : Dict
+    user_params : dict
         User provided parameters.
 
     Returns
     -------
-    Dict
+    dict
         Parameters matching `func`'s signature.
     """
     # Get the list of all default parameters
@@ -64,13 +64,13 @@ def get_optimizer(name: str) -> torch.optim.Optimizer:
     return getattr(torch.optim, name)
 
 
-def get_optimizers() -> Dict[str, str]:
+def get_optimizers() -> dict[str, str]:
     """
     Return the list of all optimizers available in torch.optim.
 
     Returns
     -------
-    Dict
+    dict
         Optimizers available in torch.optim.
     """
     optims = {}
@@ -106,13 +106,13 @@ def get_scheduler(
     return getattr(torch.optim.lr_scheduler, name)
 
 
-def get_schedulers() -> Dict[str, str]:
+def get_schedulers() -> dict[str, str]:
     """
     Return the list of all schedulers available in torch.optim.lr_scheduler.
 
     Returns
     -------
-    Dict
+    dict
         Schedulers available in torch.optim.lr_scheduler.
     """
     schedulers = {}
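
For context, get_optimizer above resolves an optimizer class by name with getattr(torch.optim, name), and get_optimizers/get_schedulers build name dictionaries from the respective modules. A rough sketch of that lookup, assuming torch is installed (the enumeration details are an approximation, not the package's exact code):

import inspect

import torch

# Name-based lookup, mirroring `return getattr(torch.optim, name)` above.
adam_cls = getattr(torch.optim, "Adam")
print(adam_cls)

# Approximate enumeration of available optimizers, similar in spirit to get_optimizers().
available = {
    name
    for name, obj in inspect.getmembers(torch.optim, inspect.isclass)
    if issubclass(obj, torch.optim.Optimizer) and obj is not torch.optim.Optimizer
}
print(sorted(available))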

{careamics-0.0.5.dist-info → careamics-0.0.6.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: careamics
-Version: 0.0.5
+Version: 0.0.6
 Summary: Toolbox for running N2V and friends.
 Project-URL: homepage, https://careamics.github.io/
 Project-URL: repository, https://github.com/CAREamics/careamics
@@ -18,16 +18,16 @@ Classifier: Typing :: Typed
 Requires-Python: >=3.9
 Requires-Dist: bioimageio-core==0.7
 Requires-Dist: numpy<2.0.0
-Requires-Dist: pillow<=10.3.0
-Requires-Dist: psutil<=6.1
-Requires-Dist: pydantic<2.9,>=2.5
-Requires-Dist: pytorch-lightning<=2.4,>=2.2
+Requires-Dist: pillow<=11.1.0
+Requires-Dist: psutil<=6.1.1
+Requires-Dist: pydantic<2.11,>=2.5
+Requires-Dist: pytorch-lightning<=2.5.0.post0,>=2.2
 Requires-Dist: pyyaml!=6.0.0,<=6.0.2
-Requires-Dist: scikit-image<=0.23.2
-Requires-Dist: tifffile<=2024.8.30
-Requires-Dist: torch<=2.5,>=2.0
-Requires-Dist: torchvision<=0.20
-Requires-Dist: typer==0.12.3
+Requires-Dist: scikit-image<=0.25.0
+Requires-Dist: tifffile<=2025.1.10
+Requires-Dist: torch<=2.5.1,>=2.0
+Requires-Dist: torchvision<=0.20.1
+Requires-Dist: typer<=0.15.1,>=0.12.3
 Requires-Dist: zarr<3.0.0
 Provides-Extra: dev
 Requires-Dist: onnx; extra == 'dev'
@@ -40,7 +40,7 @@ Requires-Dist: careamics-portfolio; extra == 'examples'
 Requires-Dist: jupyter; extra == 'examples'
 Requires-Dist: matplotlib; extra == 'examples'
 Provides-Extra: tensorboard
-Requires-Dist: protobuf==3.20.3; extra == 'tensorboard'
+Requires-Dist: protobuf==5.29.1; extra == 'tensorboard'
 Requires-Dist: tensorboard; extra == 'tensorboard'
 Provides-Extra: wandb
 Requires-Dist: wandb; extra == 'wandb'