careamics 0.0.15__py3-none-any.whl → 0.0.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release of careamics has been flagged as potentially problematic.

Files changed (79)
  1. careamics/careamist.py +11 -14
  2. careamics/cli/conf.py +18 -3
  3. careamics/config/__init__.py +8 -0
  4. careamics/config/algorithms/__init__.py +4 -0
  5. careamics/config/algorithms/hdn_algorithm_model.py +103 -0
  6. careamics/config/algorithms/microsplit_algorithm_model.py +103 -0
  7. careamics/config/algorithms/n2v_algorithm_model.py +1 -2
  8. careamics/config/algorithms/vae_algorithm_model.py +51 -16
  9. careamics/config/architectures/lvae_model.py +12 -8
  10. careamics/config/callback_model.py +7 -3
  11. careamics/config/configuration.py +15 -63
  12. careamics/config/configuration_factories.py +853 -29
  13. careamics/config/data/data_model.py +50 -11
  14. careamics/config/data/ng_data_model.py +168 -4
  15. careamics/config/data/patch_filter/__init__.py +15 -0
  16. careamics/config/data/patch_filter/filter_model.py +16 -0
  17. careamics/config/data/patch_filter/mask_filter_model.py +17 -0
  18. careamics/config/data/patch_filter/max_filter_model.py +15 -0
  19. careamics/config/data/patch_filter/meanstd_filter_model.py +18 -0
  20. careamics/config/data/patch_filter/shannon_filter_model.py +15 -0
  21. careamics/config/inference_model.py +1 -2
  22. careamics/config/likelihood_model.py +2 -2
  23. careamics/config/loss_model.py +6 -2
  24. careamics/config/nm_model.py +26 -1
  25. careamics/config/optimizer_models.py +1 -2
  26. careamics/config/support/supported_algorithms.py +5 -3
  27. careamics/config/support/supported_filters.py +17 -0
  28. careamics/config/support/supported_losses.py +5 -2
  29. careamics/config/training_model.py +6 -36
  30. careamics/config/transformations/normalize_model.py +1 -2
  31. careamics/dataset_ng/dataset.py +57 -5
  32. careamics/dataset_ng/factory.py +101 -18
  33. careamics/dataset_ng/patch_extractor/demo_custom_image_stack_loader.py +4 -4
  34. careamics/dataset_ng/patch_extractor/image_stack/in_memory_image_stack.py +1 -2
  35. careamics/dataset_ng/patch_extractor/image_stack/zarr_image_stack.py +33 -7
  36. careamics/dataset_ng/patch_extractor/image_stack_loader.py +2 -2
  37. careamics/dataset_ng/patch_filter/__init__.py +20 -0
  38. careamics/dataset_ng/patch_filter/coordinate_filter_protocol.py +27 -0
  39. careamics/dataset_ng/patch_filter/filter_factory.py +94 -0
  40. careamics/dataset_ng/patch_filter/mask_filter.py +95 -0
  41. careamics/dataset_ng/patch_filter/max_filter.py +188 -0
  42. careamics/dataset_ng/patch_filter/mean_std_filter.py +218 -0
  43. careamics/dataset_ng/patch_filter/patch_filter_protocol.py +50 -0
  44. careamics/dataset_ng/patch_filter/shannon_filter.py +188 -0
  45. careamics/file_io/read/__init__.py +0 -1
  46. careamics/lightning/__init__.py +16 -2
  47. careamics/lightning/callbacks/__init__.py +2 -0
  48. careamics/lightning/callbacks/data_stats_callback.py +33 -0
  49. careamics/lightning/dataset_ng/data_module.py +79 -2
  50. careamics/lightning/lightning_module.py +162 -61
  51. careamics/lightning/microsplit_data_module.py +636 -0
  52. careamics/lightning/predict_data_module.py +8 -1
  53. careamics/lightning/train_data_module.py +19 -8
  54. careamics/losses/__init__.py +7 -1
  55. careamics/losses/loss_factory.py +9 -1
  56. careamics/losses/lvae/losses.py +85 -0
  57. careamics/lvae_training/dataset/__init__.py +8 -8
  58. careamics/lvae_training/dataset/config.py +56 -44
  59. careamics/lvae_training/dataset/lc_dataset.py +18 -12
  60. careamics/lvae_training/dataset/ms_dataset_ref.py +5 -5
  61. careamics/lvae_training/dataset/multich_dataset.py +24 -18
  62. careamics/lvae_training/dataset/multifile_dataset.py +6 -6
  63. careamics/lvae_training/eval_utils.py +46 -24
  64. careamics/model_io/bmz_io.py +9 -5
  65. careamics/models/lvae/likelihoods.py +31 -14
  66. careamics/models/lvae/lvae.py +2 -2
  67. careamics/models/lvae/noise_models.py +20 -14
  68. careamics/prediction_utils/__init__.py +8 -2
  69. careamics/prediction_utils/prediction_outputs.py +49 -3
  70. careamics/prediction_utils/stitch_prediction.py +83 -1
  71. careamics/transforms/xy_random_rotate90.py +1 -1
  72. careamics/utils/version.py +4 -4
  73. {careamics-0.0.15.dist-info → careamics-0.0.17.dist-info}/METADATA +19 -22
  74. {careamics-0.0.15.dist-info → careamics-0.0.17.dist-info}/RECORD +77 -60
  75. careamics/dataset/zarr_dataset.py +0 -151
  76. careamics/file_io/read/zarr.py +0 -60
  77. {careamics-0.0.15.dist-info → careamics-0.0.17.dist-info}/WHEEL +0 -0
  78. {careamics-0.0.15.dist-info → careamics-0.0.17.dist-info}/entry_points.txt +0 -0
  79. {careamics-0.0.15.dist-info → careamics-0.0.17.dist-info}/licenses/LICENSE +0 -0
careamics/config/configuration.py

@@ -3,26 +3,28 @@
 from __future__ import annotations
 
 import re
-from collections.abc import Callable
 from pprint import pformat
-from typing import Any, Literal, Union
+from typing import Any, Literal, Self, Union
 
 import numpy as np
 from bioimageio.spec.generic.v0_3 import CiteEntry
 from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator
-from pydantic.main import IncEx
-from typing_extensions import Self
 
 from careamics.config.algorithms import (
     CAREAlgorithm,
+    HDNAlgorithm,
+    MicroSplitAlgorithm,
     N2NAlgorithm,
     N2VAlgorithm,
 )
 from careamics.config.data import DataConfig
 from careamics.config.training_model import TrainingConfig
+from careamics.lvae_training.dataset.config import MicroSplitDataConfig
 
 ALGORITHMS = Union[
     CAREAlgorithm,
+    HDNAlgorithm,
+    MicroSplitAlgorithm,
     N2NAlgorithm,
     N2VAlgorithm,
 ]
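
The hunk above widens the public ALGORITHMS union with the new HDN and MicroSplit algorithm configurations. A minimal sketch of what the union now covers, assuming careamics 0.0.17 is installed and using only the names imported in the hunk above:

    from typing import Union, get_args

    from careamics.config.algorithms import (
        CAREAlgorithm,
        HDNAlgorithm,
        MicroSplitAlgorithm,
        N2NAlgorithm,
        N2VAlgorithm,
    )

    # Mirror of the ALGORITHMS union from the hunk above; HDNAlgorithm and
    # MicroSplitAlgorithm are the 0.0.17 additions.
    ALGORITHMS = Union[
        CAREAlgorithm,
        HDNAlgorithm,
        MicroSplitAlgorithm,
        N2NAlgorithm,
        N2VAlgorithm,
    ]

    for algorithm_cls in get_args(ALGORITHMS):
        print(algorithm_cls.__name__)
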
@@ -86,7 +88,6 @@ class Configuration(BaseModel):
     ... axes="YX",
     ... patch_size=[64, 64],
     ... batch_size=32,
-    ... num_epochs=100
     ... )
 
     The configuration can be exported to a dictionary using the model_dump method:
@@ -110,9 +111,7 @@ class Configuration(BaseModel):
     ... "architecture": "UNet",
     ... },
     ... },
-    ... "training_config": {
-    ... "num_epochs": 200,
-    ... },
+    ... "training_config": {},
     ... "data_config": {
     ... "data_type": "tiff",
     ... "patch_size": [64, 64],
@@ -140,7 +139,7 @@ class Configuration(BaseModel):
     """Algorithm configuration, holding all parameters required to configure the
     model."""
 
-    data_config: DataConfig
+    data_config: DataConfig | MicroSplitDataConfig
     """Data configuration, holding all parameters required to configure the training
     data loader."""
 
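Because data_config can now hold either model, downstream code that assumed a plain DataConfig may need to branch on the concrete type. A schematic sketch only: the fields of MicroSplitDataConfig are not shown in this diff, the helper name is hypothetical, and the Configuration import from careamics.config is assumed.

    from careamics.config import Configuration
    from careamics.lvae_training.dataset.config import MicroSplitDataConfig

    def uses_microsplit_data(config: Configuration) -> bool:
        """Return True when the configuration carries a MicroSplitDataConfig."""
        return isinstance(config.data_config, MicroSplitDataConfig)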
 
@@ -185,7 +184,7 @@ class Configuration(BaseModel):
 
         return name
 
-    @model_validator(mode="after")
+    @model_validator(mode="after")  # TODO move to n2v configs or remove
     def validate_n2v_mask_pixel_perc(self: Self) -> Self:
         """
         Validate that there will always be at least one blind-spot pixel in every patch.
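
The docstring above states the invariant this validator enforces: every patch must contain at least one blind-spot (masked) pixel. A back-of-the-envelope sketch of that rule, not the careamics implementation; the function name and threshold handling here are illustrative only:

    import numpy as np

    def patch_has_blind_spot(patch_size: list[int], masked_pixel_percentage: float) -> bool:
        """Check that the expected number of masked pixels per patch is at least one."""
        n_pixels = int(np.prod(patch_size))
        expected_masked = n_pixels * masked_pixel_percentage / 100
        return expected_masked >= 1

    print(patch_has_blind_spot([64, 64], 0.2))  # True, ~8 masked pixels per patch
    print(patch_has_blind_spot([8, 8], 0.2))    # False, ~0.13 masked pixels per patch
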
@@ -342,19 +341,7 @@ class Configuration(BaseModel):
 
     def model_dump(
         self,
-        *,
-        mode: Literal["json", "python"] | str = "python",
-        include: IncEx | None = None,
-        exclude: IncEx | None = None,
-        context: Any | None = None,
-        by_alias: bool | None = False,
-        exclude_unset: bool = False,
-        exclude_defaults: bool = False,
-        exclude_none: bool = True,
-        round_trip: bool = False,
-        warnings: bool | Literal["none", "warn", "error"] = True,
-        fallback: Callable[[Any], Any] | None = None,
-        serialize_as_any: bool = False,
+        **kwargs: Any,
     ) -> dict[str, Any]:
         """
         Override model_dump method in order to set default values.
@@ -364,50 +351,15 @@ class Configuration(BaseModel):
 
         Parameters
         ----------
-        mode : Literal['json', 'python'] | str, default='python'
-            The serialization format.
-        include : Any | None, default=None
-            Attributes to include.
-        exclude : Any | None, default=None
-            Attributes to exclude.
-        context : Any | None, default=None
-            Additional context to pass to the serialization functions.
-        by_alias : bool, default=False
-            Whether to use attribute aliases.
-        exclude_unset : bool, default=False
-            Whether to exclude fields that are not set.
-        exclude_defaults : bool, default=False
-            Whether to exclude fields that have default values.
-        exclude_none : bool, default=true
-            Whether to exclude fields that have None values.
-        round_trip : bool, default=False
-            Whether to dump and load the data to ensure that the output is a valid
-            representation.
-        warnings : bool | Literal['none', 'warn', 'error'], default=True
-            Whether to emit warnings.
-        fallback : Callable[[Any], Any] | None, default=None
-            A function to call when an unknown value is encountered.
-        serialize_as_any : bool, default=False
-            Whether to serialize all types as Any.
+        **kwargs : Any
+            Additional arguments to pass to the parent model_dump method.
 
         Returns
         -------
         dict
             Dictionary containing the model parameters.
         """
-        dictionary = super().model_dump(
-            mode=mode,
-            include=include,
-            exclude=exclude,
-            context=context,
-            by_alias=by_alias,
-            exclude_unset=exclude_unset,
-            exclude_defaults=exclude_defaults,
-            exclude_none=exclude_none,
-            round_trip=round_trip,
-            warnings=warnings,
-            fallback=fallback,
-            serialize_as_any=serialize_as_any,
-        )
+        if "exclude_none" not in kwargs:
+            kwargs["exclude_none"] = True
 
-        return dictionary
+        return super().model_dump(**kwargs)
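
With the signature collapsed to **kwargs, the override now only injects exclude_none=True when the caller has not set it, and forwards everything else to pydantic's BaseModel.model_dump. A short usage sketch, assuming `config` is an already-validated Configuration instance:

    # Assuming `config` is a validated careamics Configuration instance:
    config_dict = config.model_dump()                  # None-valued fields dropped (exclude_none=True)
    full_dict = config.model_dump(exclude_none=False)  # caller can still override the default
    json_dict = config.model_dump(mode="json")         # any other pydantic kwarg is forwarded unchanged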