nshtrainer 1.0.0b26__tar.gz → 1.0.0b28__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (143)
  1. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/PKG-INFO +1 -1
  2. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/pyproject.toml +1 -1
  3. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/actsave.py +2 -2
  4. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/base.py +5 -3
  5. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/shared_parameters.py +5 -3
  6. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/__init__.py +22 -0
  7. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/_directory/__init__.py +2 -0
  8. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/__init__.py +6 -0
  9. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/checkpoint/__init__.py +6 -0
  10. nshtrainer-1.0.0b28/src/nshtrainer/configs/callbacks/checkpoint/time_checkpoint/__init__.py +19 -0
  11. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/loggers/__init__.py +2 -0
  12. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/lr_scheduler/__init__.py +6 -0
  13. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/lr_scheduler/linear_warmup_cosine/__init__.py +4 -0
  14. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/nn/__init__.py +2 -0
  15. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/nn/mlp/__init__.py +2 -0
  16. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/nn/nonlinearity/__init__.py +2 -0
  17. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/optimizer/__init__.py +2 -0
  18. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/profiler/__init__.py +2 -0
  19. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/trainer/__init__.py +14 -0
  20. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/trainer/_config/__init__.py +14 -0
  21. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/util/__init__.py +2 -0
  22. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/util/config/__init__.py +2 -0
  23. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/util/config/duration/__init__.py +2 -0
  24. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/loggers/__init__.py +12 -5
  25. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/lr_scheduler/__init__.py +9 -5
  26. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/model/mixins/callback.py +6 -4
  27. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/optimizer.py +5 -3
  28. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/profiler/__init__.py +9 -5
  29. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/trainer/_config.py +47 -42
  30. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/trainer/_runtime_callback.py +3 -3
  31. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/trainer/signal_connector.py +6 -4
  32. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/util/_useful_types.py +11 -2
  33. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/util/config/dtype.py +46 -43
  34. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/util/path.py +3 -2
  35. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/README.md +0 -0
  36. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/.nshconfig.generated.json +0 -0
  37. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/__init__.py +0 -0
  38. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/_callback.py +0 -0
  39. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/_checkpoint/metadata.py +0 -0
  40. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/_checkpoint/saver.py +0 -0
  41. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/_directory.py +0 -0
  42. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/_experimental/__init__.py +0 -0
  43. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/_hf_hub.py +0 -0
  44. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/__init__.py +0 -0
  45. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/checkpoint/__init__.py +0 -0
  46. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/checkpoint/_base.py +0 -0
  47. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/checkpoint/best_checkpoint.py +0 -0
  48. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/checkpoint/last_checkpoint.py +0 -0
  49. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/checkpoint/on_exception_checkpoint.py +0 -0
  50. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/checkpoint/time_checkpoint.py +0 -0
  51. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/debug_flag.py +0 -0
  52. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/directory_setup.py +0 -0
  53. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/early_stopping.py +0 -0
  54. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/ema.py +0 -0
  55. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/finite_checks.py +0 -0
  56. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/gradient_skipping.py +0 -0
  57. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/interval.py +0 -0
  58. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/log_epoch.py +0 -0
  59. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/lr_monitor.py +0 -0
  60. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/norm_logging.py +0 -0
  61. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/print_table.py +0 -0
  62. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/rlp_sanity_checks.py +0 -0
  63. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/timer.py +0 -0
  64. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/wandb_upload_code.py +0 -0
  65. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/callbacks/wandb_watch.py +0 -0
  66. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/_checkpoint/__init__.py +0 -0
  67. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/_checkpoint/metadata/__init__.py +0 -0
  68. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/_hf_hub/__init__.py +0 -0
  69. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/actsave/__init__.py +0 -0
  70. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/base/__init__.py +0 -0
  71. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/checkpoint/_base/__init__.py +0 -0
  72. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/checkpoint/best_checkpoint/__init__.py +0 -0
  73. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/checkpoint/last_checkpoint/__init__.py +0 -0
  74. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/checkpoint/on_exception_checkpoint/__init__.py +0 -0
  75. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/debug_flag/__init__.py +0 -0
  76. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/directory_setup/__init__.py +0 -0
  77. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/early_stopping/__init__.py +0 -0
  78. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/ema/__init__.py +0 -0
  79. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/finite_checks/__init__.py +0 -0
  80. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/gradient_skipping/__init__.py +0 -0
  81. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/log_epoch/__init__.py +0 -0
  82. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/lr_monitor/__init__.py +0 -0
  83. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/norm_logging/__init__.py +0 -0
  84. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/print_table/__init__.py +0 -0
  85. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/rlp_sanity_checks/__init__.py +0 -0
  86. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/shared_parameters/__init__.py +0 -0
  87. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/timer/__init__.py +0 -0
  88. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/wandb_upload_code/__init__.py +0 -0
  89. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/callbacks/wandb_watch/__init__.py +0 -0
  90. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/loggers/_base/__init__.py +0 -0
  91. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/loggers/actsave/__init__.py +0 -0
  92. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/loggers/csv/__init__.py +0 -0
  93. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/loggers/tensorboard/__init__.py +0 -0
  94. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/loggers/wandb/__init__.py +0 -0
  95. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/lr_scheduler/_base/__init__.py +0 -0
  96. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/lr_scheduler/reduce_lr_on_plateau/__init__.py +0 -0
  97. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/metrics/__init__.py +0 -0
  98. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/metrics/_config/__init__.py +0 -0
  99. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/profiler/_base/__init__.py +0 -0
  100. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/profiler/advanced/__init__.py +0 -0
  101. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/profiler/pytorch/__init__.py +0 -0
  102. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/profiler/simple/__init__.py +0 -0
  103. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/trainer/trainer/__init__.py +0 -0
  104. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/util/_environment_info/__init__.py +0 -0
  105. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/configs/util/config/dtype/__init__.py +0 -0
  106. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/data/__init__.py +0 -0
  107. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/data/balanced_batch_sampler.py +0 -0
  108. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/data/datamodule.py +0 -0
  109. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/data/transform.py +0 -0
  110. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/loggers/_base.py +0 -0
  111. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/loggers/actsave.py +0 -0
  112. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/loggers/csv.py +0 -0
  113. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/loggers/tensorboard.py +0 -0
  114. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/loggers/wandb.py +0 -0
  115. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/lr_scheduler/_base.py +0 -0
  116. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/lr_scheduler/linear_warmup_cosine.py +0 -0
  117. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/lr_scheduler/reduce_lr_on_plateau.py +0 -0
  118. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/metrics/__init__.py +0 -0
  119. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/metrics/_config.py +0 -0
  120. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/model/__init__.py +0 -0
  121. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/model/base.py +0 -0
  122. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/model/mixins/debug.py +0 -0
  123. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/model/mixins/logger.py +0 -0
  124. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/nn/__init__.py +0 -0
  125. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/nn/mlp.py +0 -0
  126. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/nn/module_dict.py +0 -0
  127. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/nn/module_list.py +0 -0
  128. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/nn/nonlinearity.py +0 -0
  129. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/profiler/_base.py +0 -0
  130. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/profiler/advanced.py +0 -0
  131. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/profiler/pytorch.py +0 -0
  132. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/profiler/simple.py +0 -0
  133. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/trainer/__init__.py +0 -0
  134. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/trainer/trainer.py +0 -0
  135. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/util/_environment_info.py +0 -0
  136. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/util/bf16.py +0 -0
  137. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/util/config/__init__.py +0 -0
  138. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/util/config/duration.py +0 -0
  139. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/util/environment.py +0 -0
  140. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/util/seed.py +0 -0
  141. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/util/slurm.py +0 -0
  142. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/util/typed.py +0 -0
  143. {nshtrainer-1.0.0b26 → nshtrainer-1.0.0b28}/src/nshtrainer/util/typing_utils.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: nshtrainer
- Version: 1.0.0b26
+ Version: 1.0.0b28
  Summary:
  Author: Nima Shoghi
  Author-email: nimashoghi@gmail.com
pyproject.toml
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "nshtrainer"
- version = "1.0.0-beta26"
+ version = "1.0.0-beta28"
  description = ""
  authors = ["Nima Shoghi <nimashoghi@gmail.com>"]
  readme = "README.md"
src/nshtrainer/callbacks/actsave.py
@@ -4,12 +4,12 @@ import contextlib
  from pathlib import Path
  from typing import Literal

- from typing_extensions import TypeAlias, override
+ from typing_extensions import TypeAliasType, override

  from .._callback import NTCallbackBase
  from .base import CallbackConfigBase

- Stage: TypeAlias = Literal["train", "validation", "test", "predict"]
+ Stage = TypeAliasType("Stage", Literal["train", "validation", "test", "predict"])


  class ActSaveConfig(CallbackConfigBase):
src/nshtrainer/callbacks/base.py
@@ -4,11 +4,11 @@ from abc import ABC, abstractmethod
  from collections import Counter
  from collections.abc import Iterable
  from dataclasses import dataclass
- from typing import TYPE_CHECKING, ClassVar, TypeAlias
+ from typing import TYPE_CHECKING, ClassVar

  import nshconfig as C
  from lightning.pytorch import Callback
- from typing_extensions import TypedDict, Unpack
+ from typing_extensions import TypeAliasType, TypedDict, Unpack

  if TYPE_CHECKING:
      from ..trainer._config import TrainerConfig
@@ -30,7 +30,9 @@ class CallbackWithMetadata:
      metadata: CallbackMetadataConfig


- ConstructedCallback: TypeAlias = Callback | CallbackWithMetadata
+ ConstructedCallback = TypeAliasType(
+     "ConstructedCallback", Callback | CallbackWithMetadata
+ )


  class CallbackConfigBase(C.Config, ABC):
src/nshtrainer/callbacks/shared_parameters.py
@@ -2,12 +2,12 @@ from __future__ import annotations

  import logging
  from collections.abc import Iterable
- from typing import Literal, Protocol, TypeAlias, runtime_checkable
+ from typing import Literal, Protocol, runtime_checkable

  import torch.nn as nn
  from lightning.pytorch import LightningModule, Trainer
  from lightning.pytorch.callbacks import Callback
- from typing_extensions import override
+ from typing_extensions import TypeAliasType, override

  from .base import CallbackConfigBase

@@ -34,7 +34,9 @@ class SharedParametersCallbackConfig(CallbackConfigBase):
          yield SharedParametersCallback(self)


- SharedParametersList: TypeAlias = list[tuple[nn.Parameter, int | float]]
+ SharedParametersList = TypeAliasType(
+     "SharedParametersList", list[tuple[nn.Parameter, int | float]]
+ )


  @runtime_checkable
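
The recurring edit in the three callback modules above replaces implicit `TypeAlias` annotations with explicit `typing_extensions.TypeAliasType` objects, which carry their name and value at runtime instead of being plain assignments that only type checkers treat specially. A minimal sketch of the pattern, reusing the `Stage` alias from actsave.py (the `run` function is illustrative only, not nshtrainer code):

from typing import Literal

from typing_extensions import TypeAliasType

# Old spelling (removed in this release):
#   Stage: TypeAlias = Literal["train", "validation", "test", "predict"]

# New spelling: an explicit alias object with __name__ and __value__ attributes.
Stage = TypeAliasType("Stage", Literal["train", "validation", "test", "predict"])


def run(stage: Stage) -> None:
    # The alias works anywhere a type annotation is expected.
    print(f"running {stage}")


run("train")
print(Stage.__name__)  # Stage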
src/nshtrainer/configs/__init__.py
@@ -14,6 +14,7 @@ from nshtrainer._hf_hub import HuggingFaceHubConfig as HuggingFaceHubConfig
  from nshtrainer.callbacks import (
      BestCheckpointCallbackConfig as BestCheckpointCallbackConfig,
  )
+ from nshtrainer.callbacks import CallbackConfig as CallbackConfig
  from nshtrainer.callbacks import DebugFlagCallbackConfig as DebugFlagCallbackConfig
  from nshtrainer.callbacks import (
      DirectorySetupCallbackConfig as DirectorySetupCallbackConfig,
@@ -46,6 +47,9 @@ from nshtrainer.callbacks import (
  from nshtrainer.callbacks import (
      SharedParametersCallbackConfig as SharedParametersCallbackConfig,
  )
+ from nshtrainer.callbacks import (
+     TimeCheckpointCallbackConfig as TimeCheckpointCallbackConfig,
+ )
  from nshtrainer.callbacks import (
      WandbUploadCodeCallbackConfig as WandbUploadCodeCallbackConfig,
  )
@@ -57,11 +61,13 @@ from nshtrainer.callbacks.checkpoint._base import (
  from nshtrainer.loggers import ActSaveLoggerConfig as ActSaveLoggerConfig
  from nshtrainer.loggers import BaseLoggerConfig as BaseLoggerConfig
  from nshtrainer.loggers import CSVLoggerConfig as CSVLoggerConfig
+ from nshtrainer.loggers import LoggerConfig as LoggerConfig
  from nshtrainer.loggers import TensorboardLoggerConfig as TensorboardLoggerConfig
  from nshtrainer.loggers import WandbLoggerConfig as WandbLoggerConfig
  from nshtrainer.lr_scheduler import (
      LinearWarmupCosineDecayLRSchedulerConfig as LinearWarmupCosineDecayLRSchedulerConfig,
  )
+ from nshtrainer.lr_scheduler import LRSchedulerConfig as LRSchedulerConfig
  from nshtrainer.lr_scheduler import LRSchedulerConfigBase as LRSchedulerConfigBase
  from nshtrainer.lr_scheduler import ReduceLROnPlateauConfig as ReduceLROnPlateauConfig
  from nshtrainer.nn import BaseNonlinearityConfig as BaseNonlinearityConfig
@@ -70,6 +76,7 @@ from nshtrainer.nn import GELUNonlinearityConfig as GELUNonlinearityConfig
  from nshtrainer.nn import LeakyReLUNonlinearityConfig as LeakyReLUNonlinearityConfig
  from nshtrainer.nn import MishNonlinearityConfig as MishNonlinearityConfig
  from nshtrainer.nn import MLPConfig as MLPConfig
+ from nshtrainer.nn import NonlinearityConfig as NonlinearityConfig
  from nshtrainer.nn import PReLUConfig as PReLUConfig
  from nshtrainer.nn import ReLUNonlinearityConfig as ReLUNonlinearityConfig
  from nshtrainer.nn import SigmoidNonlinearityConfig as SigmoidNonlinearityConfig
@@ -83,12 +90,17 @@ from nshtrainer.nn.nonlinearity import (
      SwiGLUNonlinearityConfig as SwiGLUNonlinearityConfig,
  )
  from nshtrainer.optimizer import AdamWConfig as AdamWConfig
+ from nshtrainer.optimizer import OptimizerConfig as OptimizerConfig
  from nshtrainer.optimizer import OptimizerConfigBase as OptimizerConfigBase
  from nshtrainer.profiler import AdvancedProfilerConfig as AdvancedProfilerConfig
  from nshtrainer.profiler import BaseProfilerConfig as BaseProfilerConfig
+ from nshtrainer.profiler import ProfilerConfig as ProfilerConfig
  from nshtrainer.profiler import PyTorchProfilerConfig as PyTorchProfilerConfig
  from nshtrainer.profiler import SimpleProfilerConfig as SimpleProfilerConfig
  from nshtrainer.trainer._config import AcceleratorConfigBase as AcceleratorConfigBase
+ from nshtrainer.trainer._config import (
+     CheckpointCallbackConfig as CheckpointCallbackConfig,
+ )
  from nshtrainer.trainer._config import CheckpointSavingConfig as CheckpointSavingConfig
  from nshtrainer.trainer._config import EnvironmentConfig as EnvironmentConfig
  from nshtrainer.trainer._config import GradientClippingConfig as GradientClippingConfig
@@ -127,6 +139,7 @@ from nshtrainer.util._environment_info import (
  )
  from nshtrainer.util._environment_info import GitRepositoryConfig as GitRepositoryConfig
  from nshtrainer.util.config import DTypeConfig as DTypeConfig
+ from nshtrainer.util.config import DurationConfig as DurationConfig
  from nshtrainer.util.config import EpochsConfig as EpochsConfig
  from nshtrainer.util.config import StepsConfig as StepsConfig

@@ -155,13 +168,16 @@ __all__ = [
      "BaseProfilerConfig",
      "BestCheckpointCallbackConfig",
      "CSVLoggerConfig",
+     "CallbackConfig",
      "CallbackConfigBase",
+     "CheckpointCallbackConfig",
      "CheckpointMetadata",
      "CheckpointSavingConfig",
      "DTypeConfig",
      "DebugFlagCallbackConfig",
      "DirectoryConfig",
      "DirectorySetupCallbackConfig",
+     "DurationConfig",
      "ELUNonlinearityConfig",
      "EMACallbackConfig",
      "EarlyStoppingCallbackConfig",
@@ -184,21 +200,26 @@ __all__ = [
      "GradientSkippingCallbackConfig",
      "HuggingFaceHubAutoCreateConfig",
      "HuggingFaceHubConfig",
+     "LRSchedulerConfig",
      "LRSchedulerConfigBase",
      "LastCheckpointCallbackConfig",
      "LeakyReLUNonlinearityConfig",
      "LearningRateMonitorConfig",
      "LinearWarmupCosineDecayLRSchedulerConfig",
      "LogEpochCallbackConfig",
+     "LoggerConfig",
      "MLPConfig",
      "MetricConfig",
      "MishNonlinearityConfig",
+     "NonlinearityConfig",
      "NormLoggingCallbackConfig",
      "OnExceptionCheckpointCallbackConfig",
+     "OptimizerConfig",
      "OptimizerConfigBase",
      "PReLUConfig",
      "PluginConfigBase",
      "PrintTableMetricsCallbackConfig",
+     "ProfilerConfig",
      "PyTorchProfilerConfig",
      "RLPSanityChecksCallbackConfig",
      "ReLUNonlinearityConfig",
@@ -217,6 +238,7 @@ __all__ = [
      "SwishNonlinearityConfig",
      "TanhNonlinearityConfig",
      "TensorboardLoggerConfig",
+     "TimeCheckpointCallbackConfig",
      "TrainerConfig",
      "WandbLoggerConfig",
      "WandbUploadCodeCallbackConfig",
src/nshtrainer/configs/_directory/__init__.py
@@ -6,8 +6,10 @@ from nshtrainer._directory import DirectoryConfig as DirectoryConfig
  from nshtrainer._directory import (
      DirectorySetupCallbackConfig as DirectorySetupCallbackConfig,
  )
+ from nshtrainer._directory import LoggerConfig as LoggerConfig

  __all__ = [
      "DirectoryConfig",
      "DirectorySetupCallbackConfig",
+     "LoggerConfig",
  ]
src/nshtrainer/configs/callbacks/__init__.py
@@ -5,6 +5,7 @@ __codegen__ = True
  from nshtrainer.callbacks import (
      BestCheckpointCallbackConfig as BestCheckpointCallbackConfig,
  )
+ from nshtrainer.callbacks import CallbackConfig as CallbackConfig
  from nshtrainer.callbacks import CallbackConfigBase as CallbackConfigBase
  from nshtrainer.callbacks import DebugFlagCallbackConfig as DebugFlagCallbackConfig
  from nshtrainer.callbacks import (
@@ -38,6 +39,9 @@ from nshtrainer.callbacks import (
  from nshtrainer.callbacks import (
      SharedParametersCallbackConfig as SharedParametersCallbackConfig,
  )
+ from nshtrainer.callbacks import (
+     TimeCheckpointCallbackConfig as TimeCheckpointCallbackConfig,
+ )
  from nshtrainer.callbacks import (
      WandbUploadCodeCallbackConfig as WandbUploadCodeCallbackConfig,
  )
@@ -77,6 +81,7 @@ __all__ = [
      "ActSaveConfig",
      "BaseCheckpointCallbackConfig",
      "BestCheckpointCallbackConfig",
+     "CallbackConfig",
      "CallbackConfigBase",
      "CheckpointMetadata",
      "DebugFlagCallbackConfig",
@@ -95,6 +100,7 @@ __all__ = [
      "PrintTableMetricsCallbackConfig",
      "RLPSanityChecksCallbackConfig",
      "SharedParametersCallbackConfig",
+     "TimeCheckpointCallbackConfig",
      "WandbUploadCodeCallbackConfig",
      "WandbWatchCallbackConfig",
      "actsave",
src/nshtrainer/configs/callbacks/checkpoint/__init__.py
@@ -11,6 +11,9 @@ from nshtrainer.callbacks.checkpoint import (
  from nshtrainer.callbacks.checkpoint import (
      OnExceptionCheckpointCallbackConfig as OnExceptionCheckpointCallbackConfig,
  )
+ from nshtrainer.callbacks.checkpoint import (
+     TimeCheckpointCallbackConfig as TimeCheckpointCallbackConfig,
+ )
  from nshtrainer.callbacks.checkpoint._base import (
      BaseCheckpointCallbackConfig as BaseCheckpointCallbackConfig,
  )
@@ -26,6 +29,7 @@ from . import _base as _base
  from . import best_checkpoint as best_checkpoint
  from . import last_checkpoint as last_checkpoint
  from . import on_exception_checkpoint as on_exception_checkpoint
+ from . import time_checkpoint as time_checkpoint

  __all__ = [
      "BaseCheckpointCallbackConfig",
@@ -35,8 +39,10 @@ __all__ = [
      "LastCheckpointCallbackConfig",
      "MetricConfig",
      "OnExceptionCheckpointCallbackConfig",
+     "TimeCheckpointCallbackConfig",
      "_base",
      "best_checkpoint",
      "last_checkpoint",
      "on_exception_checkpoint",
+     "time_checkpoint",
  ]
src/nshtrainer/configs/callbacks/checkpoint/time_checkpoint/__init__.py (new file)
@@ -0,0 +1,19 @@
+ from __future__ import annotations
+
+ __codegen__ = True
+
+ from nshtrainer.callbacks.checkpoint.time_checkpoint import (
+     BaseCheckpointCallbackConfig as BaseCheckpointCallbackConfig,
+ )
+ from nshtrainer.callbacks.checkpoint.time_checkpoint import (
+     CheckpointMetadata as CheckpointMetadata,
+ )
+ from nshtrainer.callbacks.checkpoint.time_checkpoint import (
+     TimeCheckpointCallbackConfig as TimeCheckpointCallbackConfig,
+ )
+
+ __all__ = [
+     "BaseCheckpointCallbackConfig",
+     "CheckpointMetadata",
+     "TimeCheckpointCallbackConfig",
+ ]
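
This new codegen'd module mirrors the other checkpoint config packages, so the time-based checkpoint config is importable both from the top-level configs namespace and from its dedicated module. A quick sanity sketch using only import paths that appear in this diff (no constructor arguments are shown here, so none are passed):

from nshtrainer.configs import TimeCheckpointCallbackConfig
from nshtrainer.configs.callbacks.checkpoint.time_checkpoint import (
    TimeCheckpointCallbackConfig as TimeCheckpointConfigAlias,
)

# Both re-exports should resolve to the same class defined in
# nshtrainer/callbacks/checkpoint/time_checkpoint.py.
assert TimeCheckpointCallbackConfig is TimeCheckpointConfigAlias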
src/nshtrainer/configs/loggers/__init__.py
@@ -5,6 +5,7 @@ __codegen__ = True
  from nshtrainer.loggers import ActSaveLoggerConfig as ActSaveLoggerConfig
  from nshtrainer.loggers import BaseLoggerConfig as BaseLoggerConfig
  from nshtrainer.loggers import CSVLoggerConfig as CSVLoggerConfig
+ from nshtrainer.loggers import LoggerConfig as LoggerConfig
  from nshtrainer.loggers import TensorboardLoggerConfig as TensorboardLoggerConfig
  from nshtrainer.loggers import WandbLoggerConfig as WandbLoggerConfig
  from nshtrainer.loggers.wandb import CallbackConfigBase as CallbackConfigBase
@@ -26,6 +27,7 @@ __all__ = [
      "BaseLoggerConfig",
      "CSVLoggerConfig",
      "CallbackConfigBase",
+     "LoggerConfig",
      "TensorboardLoggerConfig",
      "WandbLoggerConfig",
      "WandbUploadCodeCallbackConfig",
src/nshtrainer/configs/lr_scheduler/__init__.py
@@ -5,8 +5,12 @@ __codegen__ = True
  from nshtrainer.lr_scheduler import (
      LinearWarmupCosineDecayLRSchedulerConfig as LinearWarmupCosineDecayLRSchedulerConfig,
  )
+ from nshtrainer.lr_scheduler import LRSchedulerConfig as LRSchedulerConfig
  from nshtrainer.lr_scheduler import LRSchedulerConfigBase as LRSchedulerConfigBase
  from nshtrainer.lr_scheduler import ReduceLROnPlateauConfig as ReduceLROnPlateauConfig
+ from nshtrainer.lr_scheduler.linear_warmup_cosine import (
+     DurationConfig as DurationConfig,
+ )
  from nshtrainer.lr_scheduler.reduce_lr_on_plateau import MetricConfig as MetricConfig

  from . import _base as _base
@@ -14,6 +18,8 @@ from . import linear_warmup_cosine as linear_warmup_cosine
  from . import reduce_lr_on_plateau as reduce_lr_on_plateau

  __all__ = [
+     "DurationConfig",
+     "LRSchedulerConfig",
      "LRSchedulerConfigBase",
      "LinearWarmupCosineDecayLRSchedulerConfig",
      "MetricConfig",
src/nshtrainer/configs/lr_scheduler/linear_warmup_cosine/__init__.py
@@ -2,6 +2,9 @@ from __future__ import annotations

  __codegen__ = True

+ from nshtrainer.lr_scheduler.linear_warmup_cosine import (
+     DurationConfig as DurationConfig,
+ )
  from nshtrainer.lr_scheduler.linear_warmup_cosine import (
      LinearWarmupCosineDecayLRSchedulerConfig as LinearWarmupCosineDecayLRSchedulerConfig,
  )
@@ -10,6 +13,7 @@ from nshtrainer.lr_scheduler.linear_warmup_cosine import (
  )

  __all__ = [
+     "DurationConfig",
      "LRSchedulerConfigBase",
      "LinearWarmupCosineDecayLRSchedulerConfig",
  ]
src/nshtrainer/configs/nn/__init__.py
@@ -8,6 +8,7 @@ from nshtrainer.nn import GELUNonlinearityConfig as GELUNonlinearityConfig
  from nshtrainer.nn import LeakyReLUNonlinearityConfig as LeakyReLUNonlinearityConfig
  from nshtrainer.nn import MishNonlinearityConfig as MishNonlinearityConfig
  from nshtrainer.nn import MLPConfig as MLPConfig
+ from nshtrainer.nn import NonlinearityConfig as NonlinearityConfig
  from nshtrainer.nn import PReLUConfig as PReLUConfig
  from nshtrainer.nn import ReLUNonlinearityConfig as ReLUNonlinearityConfig
  from nshtrainer.nn import SigmoidNonlinearityConfig as SigmoidNonlinearityConfig
@@ -31,6 +32,7 @@ __all__ = [
      "LeakyReLUNonlinearityConfig",
      "MLPConfig",
      "MishNonlinearityConfig",
+     "NonlinearityConfig",
      "PReLUConfig",
      "ReLUNonlinearityConfig",
      "SiLUNonlinearityConfig",
src/nshtrainer/configs/nn/mlp/__init__.py
@@ -4,8 +4,10 @@ __codegen__ = True

  from nshtrainer.nn.mlp import BaseNonlinearityConfig as BaseNonlinearityConfig
  from nshtrainer.nn.mlp import MLPConfig as MLPConfig
+ from nshtrainer.nn.mlp import NonlinearityConfig as NonlinearityConfig

  __all__ = [
      "BaseNonlinearityConfig",
      "MLPConfig",
+     "NonlinearityConfig",
  ]
src/nshtrainer/configs/nn/nonlinearity/__init__.py
@@ -9,6 +9,7 @@ from nshtrainer.nn.nonlinearity import (
      LeakyReLUNonlinearityConfig as LeakyReLUNonlinearityConfig,
  )
  from nshtrainer.nn.nonlinearity import MishNonlinearityConfig as MishNonlinearityConfig
+ from nshtrainer.nn.nonlinearity import NonlinearityConfig as NonlinearityConfig
  from nshtrainer.nn.nonlinearity import PReLUConfig as PReLUConfig
  from nshtrainer.nn.nonlinearity import ReLUNonlinearityConfig as ReLUNonlinearityConfig
  from nshtrainer.nn.nonlinearity import (
@@ -38,6 +39,7 @@ __all__ = [
      "GELUNonlinearityConfig",
      "LeakyReLUNonlinearityConfig",
      "MishNonlinearityConfig",
+     "NonlinearityConfig",
      "PReLUConfig",
      "ReLUNonlinearityConfig",
      "SiLUNonlinearityConfig",
src/nshtrainer/configs/optimizer/__init__.py
@@ -3,9 +3,11 @@ from __future__ import annotations
  __codegen__ = True

  from nshtrainer.optimizer import AdamWConfig as AdamWConfig
+ from nshtrainer.optimizer import OptimizerConfig as OptimizerConfig
  from nshtrainer.optimizer import OptimizerConfigBase as OptimizerConfigBase

  __all__ = [
      "AdamWConfig",
+     "OptimizerConfig",
      "OptimizerConfigBase",
  ]
src/nshtrainer/configs/profiler/__init__.py
@@ -4,6 +4,7 @@ __codegen__ = True

  from nshtrainer.profiler import AdvancedProfilerConfig as AdvancedProfilerConfig
  from nshtrainer.profiler import BaseProfilerConfig as BaseProfilerConfig
+ from nshtrainer.profiler import ProfilerConfig as ProfilerConfig
  from nshtrainer.profiler import PyTorchProfilerConfig as PyTorchProfilerConfig
  from nshtrainer.profiler import SimpleProfilerConfig as SimpleProfilerConfig

@@ -15,6 +16,7 @@ from . import simple as simple
  __all__ = [
      "AdvancedProfilerConfig",
      "BaseProfilerConfig",
+     "ProfilerConfig",
      "PyTorchProfilerConfig",
      "SimpleProfilerConfig",
      "_base",
src/nshtrainer/configs/trainer/__init__.py
@@ -9,7 +9,11 @@ from nshtrainer.trainer._config import BaseLoggerConfig as BaseLoggerConfig
  from nshtrainer.trainer._config import (
      BestCheckpointCallbackConfig as BestCheckpointCallbackConfig,
  )
+ from nshtrainer.trainer._config import CallbackConfig as CallbackConfig
  from nshtrainer.trainer._config import CallbackConfigBase as CallbackConfigBase
+ from nshtrainer.trainer._config import (
+     CheckpointCallbackConfig as CheckpointCallbackConfig,
+ )
  from nshtrainer.trainer._config import CheckpointSavingConfig as CheckpointSavingConfig
  from nshtrainer.trainer._config import CSVLoggerConfig as CSVLoggerConfig
  from nshtrainer.trainer._config import (
@@ -29,6 +33,7 @@ from nshtrainer.trainer._config import (
      LearningRateMonitorConfig as LearningRateMonitorConfig,
  )
  from nshtrainer.trainer._config import LogEpochCallbackConfig as LogEpochCallbackConfig
+ from nshtrainer.trainer._config import LoggerConfig as LoggerConfig
  from nshtrainer.trainer._config import MetricConfig as MetricConfig
  from nshtrainer.trainer._config import (
      NormLoggingCallbackConfig as NormLoggingCallbackConfig,
@@ -37,6 +42,7 @@ from nshtrainer.trainer._config import (
      OnExceptionCheckpointCallbackConfig as OnExceptionCheckpointCallbackConfig,
  )
  from nshtrainer.trainer._config import PluginConfigBase as PluginConfigBase
+ from nshtrainer.trainer._config import ProfilerConfig as ProfilerConfig
  from nshtrainer.trainer._config import (
      RLPSanityChecksCallbackConfig as RLPSanityChecksCallbackConfig,
  )
@@ -48,6 +54,9 @@ from nshtrainer.trainer._config import StrategyConfigBase as StrategyConfigBase
  from nshtrainer.trainer._config import (
      TensorboardLoggerConfig as TensorboardLoggerConfig,
  )
+ from nshtrainer.trainer._config import (
+     TimeCheckpointCallbackConfig as TimeCheckpointCallbackConfig,
+ )
  from nshtrainer.trainer._config import WandbLoggerConfig as WandbLoggerConfig

  from . import _config as _config
@@ -59,7 +68,9 @@ __all__ = [
      "BaseLoggerConfig",
      "BestCheckpointCallbackConfig",
      "CSVLoggerConfig",
+     "CallbackConfig",
      "CallbackConfigBase",
+     "CheckpointCallbackConfig",
      "CheckpointSavingConfig",
      "DebugFlagCallbackConfig",
      "DirectoryConfig",
@@ -70,15 +81,18 @@ __all__ = [
      "LastCheckpointCallbackConfig",
      "LearningRateMonitorConfig",
      "LogEpochCallbackConfig",
+     "LoggerConfig",
      "MetricConfig",
      "NormLoggingCallbackConfig",
      "OnExceptionCheckpointCallbackConfig",
      "PluginConfigBase",
+     "ProfilerConfig",
      "RLPSanityChecksCallbackConfig",
      "SanityCheckingConfig",
      "SharedParametersCallbackConfig",
      "StrategyConfigBase",
      "TensorboardLoggerConfig",
+     "TimeCheckpointCallbackConfig",
      "TrainerConfig",
      "WandbLoggerConfig",
      "_config",
src/nshtrainer/configs/trainer/_config/__init__.py
@@ -8,7 +8,11 @@ from nshtrainer.trainer._config import BaseLoggerConfig as BaseLoggerConfig
  from nshtrainer.trainer._config import (
      BestCheckpointCallbackConfig as BestCheckpointCallbackConfig,
  )
+ from nshtrainer.trainer._config import CallbackConfig as CallbackConfig
  from nshtrainer.trainer._config import CallbackConfigBase as CallbackConfigBase
+ from nshtrainer.trainer._config import (
+     CheckpointCallbackConfig as CheckpointCallbackConfig,
+ )
  from nshtrainer.trainer._config import CheckpointSavingConfig as CheckpointSavingConfig
  from nshtrainer.trainer._config import CSVLoggerConfig as CSVLoggerConfig
  from nshtrainer.trainer._config import (
@@ -28,6 +32,7 @@ from nshtrainer.trainer._config import (
      LearningRateMonitorConfig as LearningRateMonitorConfig,
  )
  from nshtrainer.trainer._config import LogEpochCallbackConfig as LogEpochCallbackConfig
+ from nshtrainer.trainer._config import LoggerConfig as LoggerConfig
  from nshtrainer.trainer._config import MetricConfig as MetricConfig
  from nshtrainer.trainer._config import (
      NormLoggingCallbackConfig as NormLoggingCallbackConfig,
@@ -36,6 +41,7 @@ from nshtrainer.trainer._config import (
      OnExceptionCheckpointCallbackConfig as OnExceptionCheckpointCallbackConfig,
  )
  from nshtrainer.trainer._config import PluginConfigBase as PluginConfigBase
+ from nshtrainer.trainer._config import ProfilerConfig as ProfilerConfig
  from nshtrainer.trainer._config import (
      RLPSanityChecksCallbackConfig as RLPSanityChecksCallbackConfig,
  )
@@ -47,6 +53,9 @@ from nshtrainer.trainer._config import StrategyConfigBase as StrategyConfigBase
  from nshtrainer.trainer._config import (
      TensorboardLoggerConfig as TensorboardLoggerConfig,
  )
+ from nshtrainer.trainer._config import (
+     TimeCheckpointCallbackConfig as TimeCheckpointCallbackConfig,
+ )
  from nshtrainer.trainer._config import TrainerConfig as TrainerConfig
  from nshtrainer.trainer._config import WandbLoggerConfig as WandbLoggerConfig

@@ -56,7 +65,9 @@ __all__ = [
      "BaseLoggerConfig",
      "BestCheckpointCallbackConfig",
      "CSVLoggerConfig",
+     "CallbackConfig",
      "CallbackConfigBase",
+     "CheckpointCallbackConfig",
      "CheckpointSavingConfig",
      "DebugFlagCallbackConfig",
      "DirectoryConfig",
@@ -67,15 +78,18 @@ __all__ = [
      "LastCheckpointCallbackConfig",
      "LearningRateMonitorConfig",
      "LogEpochCallbackConfig",
+     "LoggerConfig",
      "MetricConfig",
      "NormLoggingCallbackConfig",
      "OnExceptionCheckpointCallbackConfig",
      "PluginConfigBase",
+     "ProfilerConfig",
      "RLPSanityChecksCallbackConfig",
      "SanityCheckingConfig",
      "SharedParametersCallbackConfig",
      "StrategyConfigBase",
      "TensorboardLoggerConfig",
+     "TimeCheckpointCallbackConfig",
      "TrainerConfig",
      "WandbLoggerConfig",
  ]
src/nshtrainer/configs/util/__init__.py
@@ -32,6 +32,7 @@ from nshtrainer.util._environment_info import (
  )
  from nshtrainer.util._environment_info import GitRepositoryConfig as GitRepositoryConfig
  from nshtrainer.util.config import DTypeConfig as DTypeConfig
+ from nshtrainer.util.config import DurationConfig as DurationConfig
  from nshtrainer.util.config import EpochsConfig as EpochsConfig
  from nshtrainer.util.config import StepsConfig as StepsConfig

@@ -40,6 +41,7 @@ from . import config as config

  __all__ = [
      "DTypeConfig",
+     "DurationConfig",
      "EnvironmentCUDAConfig",
      "EnvironmentClassInformationConfig",
      "EnvironmentConfig",
src/nshtrainer/configs/util/config/__init__.py
@@ -3,6 +3,7 @@ from __future__ import annotations
  __codegen__ = True

  from nshtrainer.util.config import DTypeConfig as DTypeConfig
+ from nshtrainer.util.config import DurationConfig as DurationConfig
  from nshtrainer.util.config import EpochsConfig as EpochsConfig
  from nshtrainer.util.config import StepsConfig as StepsConfig

@@ -11,6 +12,7 @@ from . import duration as duration

  __all__ = [
      "DTypeConfig",
+     "DurationConfig",
      "EpochsConfig",
      "StepsConfig",
      "dtype",
src/nshtrainer/configs/util/config/duration/__init__.py
@@ -2,10 +2,12 @@ from __future__ import annotations

  __codegen__ = True

+ from nshtrainer.util.config.duration import DurationConfig as DurationConfig
  from nshtrainer.util.config.duration import EpochsConfig as EpochsConfig
  from nshtrainer.util.config.duration import StepsConfig as StepsConfig

  __all__ = [
+     "DurationConfig",
      "EpochsConfig",
      "StepsConfig",
  ]
src/nshtrainer/loggers/__init__.py
@@ -1,8 +1,9 @@
  from __future__ import annotations

- from typing import Annotated, TypeAlias
+ from typing import Annotated

  import nshconfig as C
+ from typing_extensions import TypeAliasType

  from ._base import BaseLoggerConfig as BaseLoggerConfig
  from .actsave import ActSaveLoggerConfig as ActSaveLoggerConfig
@@ -10,7 +11,13 @@ from .csv import CSVLoggerConfig as CSVLoggerConfig
  from .tensorboard import TensorboardLoggerConfig as TensorboardLoggerConfig
  from .wandb import WandbLoggerConfig as WandbLoggerConfig

- LoggerConfig: TypeAlias = Annotated[
-     CSVLoggerConfig | TensorboardLoggerConfig | WandbLoggerConfig | ActSaveLoggerConfig,
-     C.Field(discriminator="name"),
- ]
+ LoggerConfig = TypeAliasType(
+     "LoggerConfig",
+     Annotated[
+         CSVLoggerConfig
+         | TensorboardLoggerConfig
+         | WandbLoggerConfig
+         | ActSaveLoggerConfig,
+         C.Field(discriminator="name"),
+     ],
+ )
src/nshtrainer/lr_scheduler/__init__.py
@@ -1,8 +1,9 @@
  from __future__ import annotations

- from typing import Annotated, TypeAlias
+ from typing import Annotated

  import nshconfig as C
+ from typing_extensions import TypeAliasType

  from ._base import LRSchedulerConfigBase as LRSchedulerConfigBase
  from ._base import LRSchedulerMetadata as LRSchedulerMetadata
@@ -15,7 +16,10 @@ from .linear_warmup_cosine import (
  from .reduce_lr_on_plateau import ReduceLROnPlateau as ReduceLROnPlateau
  from .reduce_lr_on_plateau import ReduceLROnPlateauConfig as ReduceLROnPlateauConfig

- LRSchedulerConfig: TypeAlias = Annotated[
-     LinearWarmupCosineDecayLRSchedulerConfig | ReduceLROnPlateauConfig,
-     C.Field(discriminator="name"),
- ]
+ LRSchedulerConfig = TypeAliasType(
+     "LRSchedulerConfig",
+     Annotated[
+         LinearWarmupCosineDecayLRSchedulerConfig | ReduceLROnPlateauConfig,
+         C.Field(discriminator="name"),
+     ],
+ )
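
The last two hunks apply the same migration to the public `LoggerConfig` and `LRSchedulerConfig` unions: each is now a named `TypeAliasType` wrapping an `Annotated` union discriminated on the `name` field. A rough, self-contained sketch of how such an alias behaves, written against plain Pydantic v2 rather than nshconfig (the config classes and fields below are simplified stand-ins, not nshtrainer's definitions, and this assumes a Pydantic version recent enough to understand `TypeAliasType`):

from typing import Annotated, Literal

from pydantic import BaseModel, Field
from typing_extensions import TypeAliasType


class CSVLoggerConfig(BaseModel):
    name: Literal["csv"] = "csv"
    save_dir: str = "./logs"


class WandbLoggerConfig(BaseModel):
    name: Literal["wandb"] = "wandb"
    project: str = "demo"


# Same shape as the new aliases above: a named alias around an Annotated
# union whose members are discriminated by their "name" field.
LoggerConfig = TypeAliasType(
    "LoggerConfig",
    Annotated[CSVLoggerConfig | WandbLoggerConfig, Field(discriminator="name")],
)


class ExperimentConfig(BaseModel):
    logger: LoggerConfig


cfg = ExperimentConfig.model_validate({"logger": {"name": "wandb", "project": "demo"}})
print(type(cfg.logger).__name__)  # -> WandbLoggerConfig

Wrapping the union in a named alias keeps the exported symbol introspectable at runtime (it has a `__name__` and a `__value__`) while validation against the discriminated union behaves exactly as it did with the bare `Annotated` form.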