nshtrainer 1.0.0b36.tar.gz → 1.0.0b37.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (160)
  1. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/PKG-INFO +1 -1
  2. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/pyproject.toml +1 -1
  3. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/_directory.py +3 -1
  4. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/__init__.py +12 -4
  5. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/loggers/__init__.py +6 -4
  6. nshtrainer-1.0.0b37/src/nshtrainer/configs/loggers/actsave/__init__.py +13 -0
  7. nshtrainer-1.0.0b37/src/nshtrainer/configs/loggers/base/__init__.py +11 -0
  8. nshtrainer-1.0.0b37/src/nshtrainer/configs/loggers/csv/__init__.py +13 -0
  9. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/loggers/tensorboard/__init__.py +4 -2
  10. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/loggers/wandb/__init__.py +4 -2
  11. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/lr_scheduler/__init__.py +4 -2
  12. nshtrainer-1.0.0b37/src/nshtrainer/configs/lr_scheduler/base/__init__.py +11 -0
  13. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/lr_scheduler/linear_warmup_cosine/__init__.py +4 -0
  14. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/lr_scheduler/reduce_lr_on_plateau/__init__.py +4 -0
  15. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/nn/__init__.py +4 -2
  16. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/nn/mlp/__init__.py +2 -2
  17. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/nn/nonlinearity/__init__.py +4 -2
  18. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/optimizer/__init__.py +2 -0
  19. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/__init__.py +2 -2
  20. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/_config/__init__.py +2 -2
  21. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/loggers/__init__.py +3 -8
  22. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/loggers/actsave.py +5 -2
  23. nshtrainer-1.0.0b36/src/nshtrainer/loggers/_base.py → nshtrainer-1.0.0b37/src/nshtrainer/loggers/base.py +4 -1
  24. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/loggers/csv.py +5 -3
  25. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/loggers/tensorboard.py +5 -3
  26. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/loggers/wandb.py +5 -3
  27. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/lr_scheduler/__init__.py +2 -2
  28. nshtrainer-1.0.0b36/src/nshtrainer/lr_scheduler/_base.py → nshtrainer-1.0.0b37/src/nshtrainer/lr_scheduler/base.py +3 -0
  29. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/lr_scheduler/linear_warmup_cosine.py +56 -54
  30. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/lr_scheduler/reduce_lr_on_plateau.py +4 -2
  31. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/nn/__init__.py +1 -1
  32. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/nn/mlp.py +4 -4
  33. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/nn/nonlinearity.py +37 -33
  34. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/optimizer.py +8 -2
  35. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/_config.py +2 -2
  36. nshtrainer-1.0.0b36/src/nshtrainer/configs/loggers/_base/__init__.py +0 -9
  37. nshtrainer-1.0.0b36/src/nshtrainer/configs/loggers/actsave/__init__.py +0 -11
  38. nshtrainer-1.0.0b36/src/nshtrainer/configs/loggers/csv/__init__.py +0 -11
  39. nshtrainer-1.0.0b36/src/nshtrainer/configs/lr_scheduler/_base/__init__.py +0 -9
  40. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/README.md +0 -0
  41. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/.nshconfig.generated.json +0 -0
  42. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/__init__.py +0 -0
  43. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/_callback.py +0 -0
  44. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/_checkpoint/metadata.py +0 -0
  45. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/_checkpoint/saver.py +0 -0
  46. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/_experimental/__init__.py +0 -0
  47. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/_hf_hub.py +0 -0
  48. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/__init__.py +0 -0
  49. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/actsave.py +0 -0
  50. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/base.py +0 -0
  51. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/checkpoint/__init__.py +0 -0
  52. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/checkpoint/_base.py +0 -0
  53. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/checkpoint/best_checkpoint.py +0 -0
  54. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/checkpoint/last_checkpoint.py +0 -0
  55. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/checkpoint/on_exception_checkpoint.py +0 -0
  56. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/debug_flag.py +0 -0
  57. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/directory_setup.py +0 -0
  58. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/early_stopping.py +0 -0
  59. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/ema.py +0 -0
  60. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/finite_checks.py +0 -0
  61. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/gradient_skipping.py +0 -0
  62. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/interval.py +0 -0
  63. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/log_epoch.py +0 -0
  64. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/lr_monitor.py +0 -0
  65. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/norm_logging.py +0 -0
  66. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/print_table.py +0 -0
  67. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/rlp_sanity_checks.py +0 -0
  68. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/shared_parameters.py +0 -0
  69. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/timer.py +0 -0
  70. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/wandb_upload_code.py +0 -0
  71. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/callbacks/wandb_watch.py +0 -0
  72. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/_checkpoint/__init__.py +0 -0
  73. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/_checkpoint/metadata/__init__.py +0 -0
  74. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/_directory/__init__.py +0 -0
  75. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/_hf_hub/__init__.py +0 -0
  76. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/__init__.py +0 -0
  77. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/actsave/__init__.py +0 -0
  78. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/base/__init__.py +0 -0
  79. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/checkpoint/__init__.py +0 -0
  80. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/checkpoint/_base/__init__.py +0 -0
  81. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/checkpoint/best_checkpoint/__init__.py +0 -0
  82. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/checkpoint/last_checkpoint/__init__.py +0 -0
  83. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/checkpoint/on_exception_checkpoint/__init__.py +0 -0
  84. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/debug_flag/__init__.py +0 -0
  85. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/directory_setup/__init__.py +0 -0
  86. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/early_stopping/__init__.py +0 -0
  87. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/ema/__init__.py +0 -0
  88. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/finite_checks/__init__.py +0 -0
  89. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/gradient_skipping/__init__.py +0 -0
  90. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/log_epoch/__init__.py +0 -0
  91. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/lr_monitor/__init__.py +0 -0
  92. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/norm_logging/__init__.py +0 -0
  93. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/print_table/__init__.py +0 -0
  94. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/rlp_sanity_checks/__init__.py +0 -0
  95. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/shared_parameters/__init__.py +0 -0
  96. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/timer/__init__.py +0 -0
  97. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/wandb_upload_code/__init__.py +0 -0
  98. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/callbacks/wandb_watch/__init__.py +0 -0
  99. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/metrics/__init__.py +0 -0
  100. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/metrics/_config/__init__.py +0 -0
  101. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/profiler/__init__.py +0 -0
  102. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/profiler/_base/__init__.py +0 -0
  103. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/profiler/advanced/__init__.py +0 -0
  104. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/profiler/pytorch/__init__.py +0 -0
  105. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/profiler/simple/__init__.py +0 -0
  106. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/accelerator/__init__.py +0 -0
  107. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/plugin/__init__.py +0 -0
  108. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/plugin/base/__init__.py +0 -0
  109. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/plugin/environment/__init__.py +0 -0
  110. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/plugin/io/__init__.py +0 -0
  111. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/plugin/layer_sync/__init__.py +0 -0
  112. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/plugin/precision/__init__.py +0 -0
  113. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/strategy/__init__.py +0 -0
  114. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/trainer/__init__.py +0 -0
  115. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/util/__init__.py +0 -0
  116. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/util/_environment_info/__init__.py +0 -0
  117. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/util/config/__init__.py +0 -0
  118. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/util/config/dtype/__init__.py +0 -0
  119. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/util/config/duration/__init__.py +0 -0
  120. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/data/__init__.py +0 -0
  121. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/data/balanced_batch_sampler.py +0 -0
  122. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/data/datamodule.py +0 -0
  123. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/data/transform.py +0 -0
  124. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/metrics/__init__.py +0 -0
  125. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/metrics/_config.py +0 -0
  126. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/model/__init__.py +0 -0
  127. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/model/base.py +0 -0
  128. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/model/mixins/callback.py +0 -0
  129. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/model/mixins/debug.py +0 -0
  130. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/model/mixins/logger.py +0 -0
  131. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/nn/module_dict.py +0 -0
  132. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/nn/module_list.py +0 -0
  133. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/profiler/__init__.py +0 -0
  134. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/profiler/_base.py +0 -0
  135. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/profiler/advanced.py +0 -0
  136. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/profiler/pytorch.py +0 -0
  137. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/profiler/simple.py +0 -0
  138. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/__init__.py +0 -0
  139. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/_runtime_callback.py +0 -0
  140. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/accelerator.py +0 -0
  141. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/plugin/__init__.py +0 -0
  142. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/plugin/base.py +0 -0
  143. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/plugin/environment.py +0 -0
  144. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/plugin/io.py +0 -0
  145. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/plugin/layer_sync.py +0 -0
  146. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/plugin/precision.py +0 -0
  147. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/signal_connector.py +0 -0
  148. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/strategy.py +0 -0
  149. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/trainer/trainer.py +0 -0
  150. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/util/_environment_info.py +0 -0
  151. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/util/bf16.py +0 -0
  152. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/util/config/__init__.py +0 -0
  153. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/util/config/dtype.py +0 -0
  154. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/util/config/duration.py +0 -0
  155. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/util/environment.py +0 -0
  156. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/util/path.py +0 -0
  157. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/util/seed.py +0 -0
  158. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/util/slurm.py +0 -0
  159. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/util/typed.py +0 -0
  160. {nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/util/typing_utils.py +0 -0
{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: nshtrainer
-Version: 1.0.0b36
+Version: 1.0.0b37
 Summary:
 Author: Nima Shoghi
 Author-email: nimashoghi@gmail.com

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "nshtrainer"
-version = "1.0.0-beta36"
+version = "1.0.0-beta37"
 description = ""
 authors = ["Nima Shoghi <nimashoghi@gmail.com>"]
 readme = "README.md"

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/_directory.py
@@ -81,7 +81,9 @@ class DirectoryConfig(C.Config):

         # Save to nshtrainer/{id}/log/{logger name}
         log_dir = self.resolve_subdirectory(run_id, "log")
-        log_dir = log_dir / logger.name
+        log_dir = log_dir / getattr(logger, "name")
+        # ^ NOTE: Logger must have a `name` attribute, as this is
+        # the discriminator for the logger registry
         log_dir.mkdir(exist_ok=True)

         return log_dir
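
The NOTE above is the crux of this release: logger configs are now resolved through a registry keyed on each config's `name` field, and the same `name` doubles as the per-logger log subdirectory. A minimal sketch of that lookup, assuming pydantic-style nshconfig configs as shown in the hunks below; the resolve_log_dir helper and the directory root are illustrative, not package API:

from pathlib import Path
from typing import Literal

import nshconfig as C


class LoggerConfigBase(C.Config):
    enabled: bool = True


class CSVLoggerConfig(LoggerConfigBase):
    # `name` is both the registry discriminator and the log subdirectory.
    name: Literal["csv"] = "csv"


def resolve_log_dir(root: Path, run_id: str, logger: LoggerConfigBase) -> Path:
    # Mirrors the hunk above: nshtrainer/{id}/log/{logger name}.
    # getattr is used because `name` is declared on subclasses, not the base.
    log_dir = root / run_id / "log" / getattr(logger, "name")
    log_dir.mkdir(parents=True, exist_ok=True)
    return log_dir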

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/__init__.py
@@ -60,24 +60,26 @@ from nshtrainer.callbacks.checkpoint._base import (
     BaseCheckpointCallbackConfig as BaseCheckpointCallbackConfig,
 )
 from nshtrainer.loggers import ActSaveLoggerConfig as ActSaveLoggerConfig
-from nshtrainer.loggers import BaseLoggerConfig as BaseLoggerConfig
 from nshtrainer.loggers import CSVLoggerConfig as CSVLoggerConfig
 from nshtrainer.loggers import LoggerConfig as LoggerConfig
+from nshtrainer.loggers import LoggerConfigBase as LoggerConfigBase
 from nshtrainer.loggers import TensorboardLoggerConfig as TensorboardLoggerConfig
 from nshtrainer.loggers import WandbLoggerConfig as WandbLoggerConfig
+from nshtrainer.loggers import logger_registry as logger_registry
 from nshtrainer.lr_scheduler import (
     LinearWarmupCosineDecayLRSchedulerConfig as LinearWarmupCosineDecayLRSchedulerConfig,
 )
 from nshtrainer.lr_scheduler import LRSchedulerConfig as LRSchedulerConfig
 from nshtrainer.lr_scheduler import LRSchedulerConfigBase as LRSchedulerConfigBase
 from nshtrainer.lr_scheduler import ReduceLROnPlateauConfig as ReduceLROnPlateauConfig
-from nshtrainer.nn import BaseNonlinearityConfig as BaseNonlinearityConfig
+from nshtrainer.lr_scheduler.base import lr_scheduler_registry as lr_scheduler_registry
 from nshtrainer.nn import ELUNonlinearityConfig as ELUNonlinearityConfig
 from nshtrainer.nn import GELUNonlinearityConfig as GELUNonlinearityConfig
 from nshtrainer.nn import LeakyReLUNonlinearityConfig as LeakyReLUNonlinearityConfig
 from nshtrainer.nn import MishNonlinearityConfig as MishNonlinearityConfig
 from nshtrainer.nn import MLPConfig as MLPConfig
 from nshtrainer.nn import NonlinearityConfig as NonlinearityConfig
+from nshtrainer.nn import NonlinearityConfigBase as NonlinearityConfigBase
 from nshtrainer.nn import PReLUConfig as PReLUConfig
 from nshtrainer.nn import ReLUNonlinearityConfig as ReLUNonlinearityConfig
 from nshtrainer.nn import SigmoidNonlinearityConfig as SigmoidNonlinearityConfig
@@ -90,9 +92,11 @@ from nshtrainer.nn import TanhNonlinearityConfig as TanhNonlinearityConfig
 from nshtrainer.nn.nonlinearity import (
     SwiGLUNonlinearityConfig as SwiGLUNonlinearityConfig,
 )
+from nshtrainer.nn.nonlinearity import nonlinearity_registry as nonlinearity_registry
 from nshtrainer.optimizer import AdamWConfig as AdamWConfig
 from nshtrainer.optimizer import OptimizerConfig as OptimizerConfig
 from nshtrainer.optimizer import OptimizerConfigBase as OptimizerConfigBase
+from nshtrainer.optimizer import optimizer_registry as optimizer_registry
 from nshtrainer.profiler import AdvancedProfilerConfig as AdvancedProfilerConfig
 from nshtrainer.profiler import BaseProfilerConfig as BaseProfilerConfig
 from nshtrainer.profiler import ProfilerConfig as ProfilerConfig
@@ -225,8 +229,6 @@ __all__ = [
     "AdvancedProfilerConfig",
     "AsyncCheckpointIOPlugin",
     "BaseCheckpointCallbackConfig",
-    "BaseLoggerConfig",
-    "BaseNonlinearityConfig",
     "BaseProfilerConfig",
     "BestCheckpointCallbackConfig",
     "BitsandbytesPluginConfig",
@@ -280,6 +282,7 @@ __all__ = [
     "LinearWarmupCosineDecayLRSchedulerConfig",
     "LogEpochCallbackConfig",
     "LoggerConfig",
+    "LoggerConfigBase",
     "MLPConfig",
     "MPIEnvironmentPlugin",
     "MPSAcceleratorConfig",
@@ -287,6 +290,7 @@ __all__ = [
     "MishNonlinearityConfig",
     "MixedPrecisionPluginConfig",
     "NonlinearityConfig",
+    "NonlinearityConfigBase",
     "NormLoggingCallbackConfig",
     "OnExceptionCheckpointCallbackConfig",
     "OptimizerConfig",
@@ -334,11 +338,15 @@ __all__ = [
     "accelerator_registry",
     "callback_registry",
     "callbacks",
+    "logger_registry",
     "loggers",
     "lr_scheduler",
+    "lr_scheduler_registry",
     "metrics",
     "nn",
+    "nonlinearity_registry",
     "optimizer",
+    "optimizer_registry",
     "plugin_registry",
     "profiler",
     "trainer",

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/loggers/__init__.py
@@ -3,11 +3,12 @@ from __future__ import annotations
 __codegen__ = True

 from nshtrainer.loggers import ActSaveLoggerConfig as ActSaveLoggerConfig
-from nshtrainer.loggers import BaseLoggerConfig as BaseLoggerConfig
 from nshtrainer.loggers import CSVLoggerConfig as CSVLoggerConfig
 from nshtrainer.loggers import LoggerConfig as LoggerConfig
+from nshtrainer.loggers import LoggerConfigBase as LoggerConfigBase
 from nshtrainer.loggers import TensorboardLoggerConfig as TensorboardLoggerConfig
 from nshtrainer.loggers import WandbLoggerConfig as WandbLoggerConfig
+from nshtrainer.loggers import logger_registry as logger_registry
 from nshtrainer.loggers.wandb import CallbackConfigBase as CallbackConfigBase
 from nshtrainer.loggers.wandb import (
     WandbUploadCodeCallbackConfig as WandbUploadCodeCallbackConfig,
@@ -16,25 +17,26 @@ from nshtrainer.loggers.wandb import (
     WandbWatchCallbackConfig as WandbWatchCallbackConfig,
 )

-from . import _base as _base
 from . import actsave as actsave
+from . import base as base
 from . import csv as csv
 from . import tensorboard as tensorboard
 from . import wandb as wandb

 __all__ = [
     "ActSaveLoggerConfig",
-    "BaseLoggerConfig",
     "CSVLoggerConfig",
     "CallbackConfigBase",
     "LoggerConfig",
+    "LoggerConfigBase",
     "TensorboardLoggerConfig",
     "WandbLoggerConfig",
     "WandbUploadCodeCallbackConfig",
     "WandbWatchCallbackConfig",
-    "_base",
     "actsave",
+    "base",
     "csv",
+    "logger_registry",
     "tensorboard",
     "wandb",
 ]

nshtrainer-1.0.0b37/src/nshtrainer/configs/loggers/actsave/__init__.py
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+__codegen__ = True
+
+from nshtrainer.loggers.actsave import ActSaveLoggerConfig as ActSaveLoggerConfig
+from nshtrainer.loggers.actsave import LoggerConfigBase as LoggerConfigBase
+from nshtrainer.loggers.actsave import logger_registry as logger_registry
+
+__all__ = [
+    "ActSaveLoggerConfig",
+    "LoggerConfigBase",
+    "logger_registry",
+]

nshtrainer-1.0.0b37/src/nshtrainer/configs/loggers/base/__init__.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+__codegen__ = True
+
+from nshtrainer.loggers.base import LoggerConfigBase as LoggerConfigBase
+from nshtrainer.loggers.base import logger_registry as logger_registry
+
+__all__ = [
+    "LoggerConfigBase",
+    "logger_registry",
+]

nshtrainer-1.0.0b37/src/nshtrainer/configs/loggers/csv/__init__.py
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+__codegen__ = True
+
+from nshtrainer.loggers.csv import CSVLoggerConfig as CSVLoggerConfig
+from nshtrainer.loggers.csv import LoggerConfigBase as LoggerConfigBase
+from nshtrainer.loggers.csv import logger_registry as logger_registry
+
+__all__ = [
+    "CSVLoggerConfig",
+    "LoggerConfigBase",
+    "logger_registry",
+]

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/loggers/tensorboard/__init__.py
@@ -2,12 +2,14 @@ from __future__ import annotations

 __codegen__ = True

-from nshtrainer.loggers.tensorboard import BaseLoggerConfig as BaseLoggerConfig
+from nshtrainer.loggers.tensorboard import LoggerConfigBase as LoggerConfigBase
 from nshtrainer.loggers.tensorboard import (
     TensorboardLoggerConfig as TensorboardLoggerConfig,
 )
+from nshtrainer.loggers.tensorboard import logger_registry as logger_registry

 __all__ = [
-    "BaseLoggerConfig",
+    "LoggerConfigBase",
     "TensorboardLoggerConfig",
+    "logger_registry",
 ]

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/loggers/wandb/__init__.py
@@ -2,8 +2,8 @@ from __future__ import annotations

 __codegen__ = True

-from nshtrainer.loggers.wandb import BaseLoggerConfig as BaseLoggerConfig
 from nshtrainer.loggers.wandb import CallbackConfigBase as CallbackConfigBase
+from nshtrainer.loggers.wandb import LoggerConfigBase as LoggerConfigBase
 from nshtrainer.loggers.wandb import WandbLoggerConfig as WandbLoggerConfig
 from nshtrainer.loggers.wandb import (
     WandbUploadCodeCallbackConfig as WandbUploadCodeCallbackConfig,
@@ -11,11 +11,13 @@ from nshtrainer.loggers.wandb import (
 from nshtrainer.loggers.wandb import (
     WandbWatchCallbackConfig as WandbWatchCallbackConfig,
 )
+from nshtrainer.loggers.wandb import logger_registry as logger_registry

 __all__ = [
-    "BaseLoggerConfig",
     "CallbackConfigBase",
+    "LoggerConfigBase",
     "WandbLoggerConfig",
     "WandbUploadCodeCallbackConfig",
     "WandbWatchCallbackConfig",
+    "logger_registry",
 ]

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/lr_scheduler/__init__.py
@@ -8,12 +8,13 @@ from nshtrainer.lr_scheduler import (
 from nshtrainer.lr_scheduler import LRSchedulerConfig as LRSchedulerConfig
 from nshtrainer.lr_scheduler import LRSchedulerConfigBase as LRSchedulerConfigBase
 from nshtrainer.lr_scheduler import ReduceLROnPlateauConfig as ReduceLROnPlateauConfig
+from nshtrainer.lr_scheduler.base import lr_scheduler_registry as lr_scheduler_registry
 from nshtrainer.lr_scheduler.linear_warmup_cosine import (
     DurationConfig as DurationConfig,
 )
 from nshtrainer.lr_scheduler.reduce_lr_on_plateau import MetricConfig as MetricConfig

-from . import _base as _base
+from . import base as base
 from . import linear_warmup_cosine as linear_warmup_cosine
 from . import reduce_lr_on_plateau as reduce_lr_on_plateau

@@ -24,7 +25,8 @@ __all__ = [
     "LinearWarmupCosineDecayLRSchedulerConfig",
     "MetricConfig",
     "ReduceLROnPlateauConfig",
-    "_base",
+    "base",
     "linear_warmup_cosine",
+    "lr_scheduler_registry",
     "reduce_lr_on_plateau",
 ]

nshtrainer-1.0.0b37/src/nshtrainer/configs/lr_scheduler/base/__init__.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+__codegen__ = True
+
+from nshtrainer.lr_scheduler.base import LRSchedulerConfigBase as LRSchedulerConfigBase
+from nshtrainer.lr_scheduler.base import lr_scheduler_registry as lr_scheduler_registry
+
+__all__ = [
+    "LRSchedulerConfigBase",
+    "lr_scheduler_registry",
+]

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/lr_scheduler/linear_warmup_cosine/__init__.py
@@ -11,9 +11,13 @@ from nshtrainer.lr_scheduler.linear_warmup_cosine import (
 from nshtrainer.lr_scheduler.linear_warmup_cosine import (
     LRSchedulerConfigBase as LRSchedulerConfigBase,
 )
+from nshtrainer.lr_scheduler.linear_warmup_cosine import (
+    lr_scheduler_registry as lr_scheduler_registry,
+)

 __all__ = [
     "DurationConfig",
     "LRSchedulerConfigBase",
     "LinearWarmupCosineDecayLRSchedulerConfig",
+    "lr_scheduler_registry",
 ]

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/lr_scheduler/reduce_lr_on_plateau/__init__.py
@@ -9,9 +9,13 @@ from nshtrainer.lr_scheduler.reduce_lr_on_plateau import MetricConfig as MetricConfig
 from nshtrainer.lr_scheduler.reduce_lr_on_plateau import (
     ReduceLROnPlateauConfig as ReduceLROnPlateauConfig,
 )
+from nshtrainer.lr_scheduler.reduce_lr_on_plateau import (
+    lr_scheduler_registry as lr_scheduler_registry,
+)

 __all__ = [
     "LRSchedulerConfigBase",
     "MetricConfig",
     "ReduceLROnPlateauConfig",
+    "lr_scheduler_registry",
 ]

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/nn/__init__.py
@@ -2,13 +2,13 @@ from __future__ import annotations

 __codegen__ = True

-from nshtrainer.nn import BaseNonlinearityConfig as BaseNonlinearityConfig
 from nshtrainer.nn import ELUNonlinearityConfig as ELUNonlinearityConfig
 from nshtrainer.nn import GELUNonlinearityConfig as GELUNonlinearityConfig
 from nshtrainer.nn import LeakyReLUNonlinearityConfig as LeakyReLUNonlinearityConfig
 from nshtrainer.nn import MishNonlinearityConfig as MishNonlinearityConfig
 from nshtrainer.nn import MLPConfig as MLPConfig
 from nshtrainer.nn import NonlinearityConfig as NonlinearityConfig
+from nshtrainer.nn import NonlinearityConfigBase as NonlinearityConfigBase
 from nshtrainer.nn import PReLUConfig as PReLUConfig
 from nshtrainer.nn import ReLUNonlinearityConfig as ReLUNonlinearityConfig
 from nshtrainer.nn import SigmoidNonlinearityConfig as SigmoidNonlinearityConfig
@@ -21,18 +21,19 @@ from nshtrainer.nn import TanhNonlinearityConfig as TanhNonlinearityConfig
 from nshtrainer.nn.nonlinearity import (
     SwiGLUNonlinearityConfig as SwiGLUNonlinearityConfig,
 )
+from nshtrainer.nn.nonlinearity import nonlinearity_registry as nonlinearity_registry

 from . import mlp as mlp
 from . import nonlinearity as nonlinearity

 __all__ = [
-    "BaseNonlinearityConfig",
     "ELUNonlinearityConfig",
     "GELUNonlinearityConfig",
     "LeakyReLUNonlinearityConfig",
     "MLPConfig",
     "MishNonlinearityConfig",
     "NonlinearityConfig",
+    "NonlinearityConfigBase",
     "PReLUConfig",
     "ReLUNonlinearityConfig",
     "SiLUNonlinearityConfig",
@@ -45,4 +46,5 @@ __all__ = [
     "TanhNonlinearityConfig",
     "mlp",
     "nonlinearity",
+    "nonlinearity_registry",
 ]

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/nn/mlp/__init__.py
@@ -2,12 +2,12 @@ from __future__ import annotations

 __codegen__ = True

-from nshtrainer.nn.mlp import BaseNonlinearityConfig as BaseNonlinearityConfig
 from nshtrainer.nn.mlp import MLPConfig as MLPConfig
 from nshtrainer.nn.mlp import NonlinearityConfig as NonlinearityConfig
+from nshtrainer.nn.mlp import NonlinearityConfigBase as NonlinearityConfigBase

 __all__ = [
-    "BaseNonlinearityConfig",
     "MLPConfig",
     "NonlinearityConfig",
+    "NonlinearityConfigBase",
 ]

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/nn/nonlinearity/__init__.py
@@ -2,7 +2,6 @@ from __future__ import annotations

 __codegen__ = True

-from nshtrainer.nn.nonlinearity import BaseNonlinearityConfig as BaseNonlinearityConfig
 from nshtrainer.nn.nonlinearity import ELUNonlinearityConfig as ELUNonlinearityConfig
 from nshtrainer.nn.nonlinearity import GELUNonlinearityConfig as GELUNonlinearityConfig
 from nshtrainer.nn.nonlinearity import (
@@ -10,6 +9,7 @@ from nshtrainer.nn.nonlinearity import (
 )
 from nshtrainer.nn.nonlinearity import MishNonlinearityConfig as MishNonlinearityConfig
 from nshtrainer.nn.nonlinearity import NonlinearityConfig as NonlinearityConfig
+from nshtrainer.nn.nonlinearity import NonlinearityConfigBase as NonlinearityConfigBase
 from nshtrainer.nn.nonlinearity import PReLUConfig as PReLUConfig
 from nshtrainer.nn.nonlinearity import ReLUNonlinearityConfig as ReLUNonlinearityConfig
 from nshtrainer.nn.nonlinearity import (
@@ -32,14 +32,15 @@ from nshtrainer.nn.nonlinearity import (
     SwishNonlinearityConfig as SwishNonlinearityConfig,
 )
 from nshtrainer.nn.nonlinearity import TanhNonlinearityConfig as TanhNonlinearityConfig
+from nshtrainer.nn.nonlinearity import nonlinearity_registry as nonlinearity_registry

 __all__ = [
-    "BaseNonlinearityConfig",
     "ELUNonlinearityConfig",
     "GELUNonlinearityConfig",
     "LeakyReLUNonlinearityConfig",
     "MishNonlinearityConfig",
     "NonlinearityConfig",
+    "NonlinearityConfigBase",
     "PReLUConfig",
     "ReLUNonlinearityConfig",
     "SiLUNonlinearityConfig",
@@ -50,4 +51,5 @@ __all__ = [
     "SwiGLUNonlinearityConfig",
     "SwishNonlinearityConfig",
     "TanhNonlinearityConfig",
+    "nonlinearity_registry",
 ]

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/optimizer/__init__.py
@@ -5,9 +5,11 @@ __codegen__ = True
 from nshtrainer.optimizer import AdamWConfig as AdamWConfig
 from nshtrainer.optimizer import OptimizerConfig as OptimizerConfig
 from nshtrainer.optimizer import OptimizerConfigBase as OptimizerConfigBase
+from nshtrainer.optimizer import optimizer_registry as optimizer_registry

 __all__ = [
     "AdamWConfig",
     "OptimizerConfig",
     "OptimizerConfigBase",
+    "optimizer_registry",
 ]

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/__init__.py
@@ -8,7 +8,6 @@ from nshtrainer.trainer import callback_registry as callback_registry
 from nshtrainer.trainer import plugin_registry as plugin_registry
 from nshtrainer.trainer._config import AcceleratorConfig as AcceleratorConfig
 from nshtrainer.trainer._config import ActSaveLoggerConfig as ActSaveLoggerConfig
-from nshtrainer.trainer._config import BaseLoggerConfig as BaseLoggerConfig
 from nshtrainer.trainer._config import (
     BestCheckpointCallbackConfig as BestCheckpointCallbackConfig,
 )
@@ -37,6 +36,7 @@ from nshtrainer.trainer._config import (
 )
 from nshtrainer.trainer._config import LogEpochCallbackConfig as LogEpochCallbackConfig
 from nshtrainer.trainer._config import LoggerConfig as LoggerConfig
+from nshtrainer.trainer._config import LoggerConfigBase as LoggerConfigBase
 from nshtrainer.trainer._config import MetricConfig as MetricConfig
 from nshtrainer.trainer._config import (
     NormLoggingCallbackConfig as NormLoggingCallbackConfig,
@@ -133,7 +133,6 @@ __all__ = [
     "AcceleratorConfigBase",
     "ActSaveLoggerConfig",
     "AsyncCheckpointIOPlugin",
-    "BaseLoggerConfig",
     "BestCheckpointCallbackConfig",
     "BitsandbytesPluginConfig",
     "CPUAcceleratorConfig",
@@ -161,6 +160,7 @@ __all__ = [
     "LightningEnvironmentPlugin",
     "LogEpochCallbackConfig",
     "LoggerConfig",
+    "LoggerConfigBase",
     "MPIEnvironmentPlugin",
     "MPSAcceleratorConfig",
     "MetricConfig",

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/configs/trainer/_config/__init__.py
@@ -4,7 +4,6 @@ __codegen__ = True

 from nshtrainer.trainer._config import AcceleratorConfig as AcceleratorConfig
 from nshtrainer.trainer._config import ActSaveLoggerConfig as ActSaveLoggerConfig
-from nshtrainer.trainer._config import BaseLoggerConfig as BaseLoggerConfig
 from nshtrainer.trainer._config import (
     BestCheckpointCallbackConfig as BestCheckpointCallbackConfig,
 )
@@ -33,6 +32,7 @@ from nshtrainer.trainer._config import (
 )
 from nshtrainer.trainer._config import LogEpochCallbackConfig as LogEpochCallbackConfig
 from nshtrainer.trainer._config import LoggerConfig as LoggerConfig
+from nshtrainer.trainer._config import LoggerConfigBase as LoggerConfigBase
 from nshtrainer.trainer._config import MetricConfig as MetricConfig
 from nshtrainer.trainer._config import (
     NormLoggingCallbackConfig as NormLoggingCallbackConfig,
@@ -59,7 +59,6 @@ from nshtrainer.trainer._config import WandbLoggerConfig as WandbLoggerConfig
 __all__ = [
     "AcceleratorConfig",
     "ActSaveLoggerConfig",
-    "BaseLoggerConfig",
     "BestCheckpointCallbackConfig",
     "CSVLoggerConfig",
     "CallbackConfig",
@@ -76,6 +75,7 @@ __all__ = [
     "LearningRateMonitorConfig",
     "LogEpochCallbackConfig",
     "LoggerConfig",
+    "LoggerConfigBase",
     "MetricConfig",
     "NormLoggingCallbackConfig",
     "OnExceptionCheckpointCallbackConfig",

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/loggers/__init__.py
@@ -5,19 +5,14 @@ from typing import Annotated
 import nshconfig as C
 from typing_extensions import TypeAliasType

-from ._base import BaseLoggerConfig as BaseLoggerConfig
 from .actsave import ActSaveLoggerConfig as ActSaveLoggerConfig
+from .base import LoggerConfigBase as LoggerConfigBase
+from .base import logger_registry as logger_registry
 from .csv import CSVLoggerConfig as CSVLoggerConfig
 from .tensorboard import TensorboardLoggerConfig as TensorboardLoggerConfig
 from .wandb import WandbLoggerConfig as WandbLoggerConfig

 LoggerConfig = TypeAliasType(
     "LoggerConfig",
-    Annotated[
-        CSVLoggerConfig
-        | TensorboardLoggerConfig
-        | WandbLoggerConfig
-        | ActSaveLoggerConfig,
-        C.Field(discriminator="name"),
-    ],
+    Annotated[LoggerConfigBase, logger_registry.DynamicResolution()],
 )
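
This hunk is the payoff of the refactor: LoggerConfig is no longer a closed, hard-coded union of the four built-in configs but resolves dynamically against logger_registry, so any registered LoggerConfigBase subclass can participate. A hedged sketch of what that enables downstream; the custom class, its name, and the create_logger body are illustrative, while the @logger_registry.register decorator and the base class come from the hunks below:

from typing import Literal

from nshtrainer.loggers.base import LoggerConfigBase, logger_registry


@logger_registry.register
class MyCustomLoggerConfig(LoggerConfigBase):
    # The `name` literal is the discriminator the registry resolves on.
    name: Literal["my-custom"] = "my-custom"

    def create_logger(self, trainer_config):
        # Would construct and return a lightning.pytorch.loggers.Logger here.
        raise NotImplementedError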

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/loggers/actsave.py
@@ -5,11 +5,14 @@ from typing import Any, Literal

 import numpy as np
 from lightning.pytorch.loggers import Logger
+from typing_extensions import final

-from ._base import BaseLoggerConfig
+from .base import LoggerConfigBase, logger_registry


-class ActSaveLoggerConfig(BaseLoggerConfig):
+@final
+@logger_registry.register
+class ActSaveLoggerConfig(LoggerConfigBase):
     name: Literal["actsave"] = "actsave"

     def create_logger(self, trainer_config):

nshtrainer-1.0.0b36/src/nshtrainer/loggers/_base.py → nshtrainer-1.0.0b37/src/nshtrainer/loggers/base.py
@@ -10,7 +10,7 @@ if TYPE_CHECKING:
     from ..trainer._config import TrainerConfig


-class BaseLoggerConfig(C.Config, ABC):
+class LoggerConfigBase(C.Config, ABC):
     enabled: bool = True
     """Enable this logger."""

@@ -29,3 +29,6 @@ class BaseLoggerConfig(C.Config, ABC):

     def __bool__(self):
         return self.enabled
+
+
+logger_registry = C.Registry(LoggerConfigBase, discriminator="name")
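
Note that LoggerConfigBase keeps the `enabled` flag and derives `__bool__` from it, so disabled logger configs drop out under plain truthiness. A small usage sketch, assuming pydantic-style keyword construction; the config list is illustrative:

from nshtrainer.loggers import CSVLoggerConfig, TensorboardLoggerConfig

configs = [CSVLoggerConfig(), TensorboardLoggerConfig(enabled=False)]
# __bool__ returns self.enabled, so only enabled configs survive the filter.
active = [cfg for cfg in configs if cfg]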

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/loggers/csv.py
@@ -2,12 +2,14 @@ from __future__ import annotations

 from typing import Literal

-from typing_extensions import override
+from typing_extensions import final, override

-from ._base import BaseLoggerConfig
+from .base import LoggerConfigBase, logger_registry


-class CSVLoggerConfig(BaseLoggerConfig):
+@final
+@logger_registry.register
+class CSVLoggerConfig(LoggerConfigBase):
     name: Literal["csv"] = "csv"

     enabled: bool = True

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/loggers/tensorboard.py
@@ -4,9 +4,9 @@ import logging
 from typing import Literal

 import nshconfig as C
-from typing_extensions import override
+from typing_extensions import final, override

-from ._base import BaseLoggerConfig
+from .base import LoggerConfigBase, logger_registry

 log = logging.getLogger(__name__)

@@ -30,7 +30,9 @@ def _tensorboard_available():
         return False


-class TensorboardLoggerConfig(BaseLoggerConfig):
+@final
+@logger_registry.register
+class TensorboardLoggerConfig(LoggerConfigBase):
     name: Literal["tensorboard"] = "tensorboard"

     enabled: bool = C.Field(default_factory=lambda: _tensorboard_available())

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/loggers/wandb.py
@@ -7,12 +7,12 @@ from typing import TYPE_CHECKING, Literal
 import nshconfig as C
 from lightning.pytorch import Callback, LightningModule, Trainer
 from packaging import version
-from typing_extensions import assert_never, override
+from typing_extensions import assert_never, final, override

 from ..callbacks.base import CallbackConfigBase
 from ..callbacks.wandb_upload_code import WandbUploadCodeCallbackConfig
 from ..callbacks.wandb_watch import WandbWatchCallbackConfig
-from ._base import BaseLoggerConfig
+from .base import LoggerConfigBase, logger_registry

 if TYPE_CHECKING:
     from ..trainer._config import TrainerConfig
@@ -73,7 +73,9 @@ class FinishWandbOnTeardownCallback(Callback):
         wandb.finish()


-class WandbLoggerConfig(CallbackConfigBase, BaseLoggerConfig):
+@final
+@logger_registry.register
+class WandbLoggerConfig(CallbackConfigBase, LoggerConfigBase):
     name: Literal["wandb"] = "wandb"

     enabled: bool = C.Field(default_factory=lambda: _wandb_available())

{nshtrainer-1.0.0b36 → nshtrainer-1.0.0b37}/src/nshtrainer/lr_scheduler/__init__.py
@@ -5,8 +5,8 @@ from typing import Annotated
 import nshconfig as C
 from typing_extensions import TypeAliasType

-from ._base import LRSchedulerConfigBase as LRSchedulerConfigBase
-from ._base import LRSchedulerMetadata as LRSchedulerMetadata
+from .base import LRSchedulerConfigBase as LRSchedulerConfigBase
+from .base import LRSchedulerMetadata as LRSchedulerMetadata
 from .linear_warmup_cosine import (
     LinearWarmupCosineAnnealingLR as LinearWarmupCosineAnnealingLR,
 )

nshtrainer-1.0.0b36/src/nshtrainer/lr_scheduler/_base.py → nshtrainer-1.0.0b37/src/nshtrainer/lr_scheduler/base.py
@@ -94,3 +94,6 @@ class LRSchedulerConfigBase(C.Config, ABC):
         # ^ This is a hack to trigger the computation of the estimated stepping batches
         # and make sure that the `trainer.num_training_batches` attribute is set.
         return math.ceil(trainer.num_training_batches / trainer.accumulate_grad_batches)
+
+
+lr_scheduler_registry = C.Registry(LRSchedulerConfigBase, discriminator="name")