nshtrainer 1.4.0__tar.gz → 1.4.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (165)
  1. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/PKG-INFO +2 -2
  2. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/pyproject.toml +2 -2
  3. nshtrainer-1.4.1/src/nshtrainer/callbacks/log_epoch.py +136 -0
  4. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/_config.py +1 -1
  5. nshtrainer-1.4.0/src/nshtrainer/callbacks/log_epoch.py +0 -49
  6. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/README.md +0 -0
  7. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/.nshconfig.generated.json +0 -0
  8. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/__init__.py +0 -0
  9. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/_callback.py +0 -0
  10. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/_checkpoint/metadata.py +0 -0
  11. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/_checkpoint/saver.py +0 -0
  12. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/_experimental/__init__.py +0 -0
  13. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/_hf_hub.py +0 -0
  14. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/__init__.py +0 -0
  15. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/actsave.py +0 -0
  16. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/base.py +0 -0
  17. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/checkpoint/__init__.py +0 -0
  18. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/checkpoint/_base.py +0 -0
  19. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/checkpoint/best_checkpoint.py +0 -0
  20. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/checkpoint/last_checkpoint.py +0 -0
  21. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/checkpoint/on_exception_checkpoint.py +0 -0
  22. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/debug_flag.py +0 -0
  23. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/directory_setup.py +0 -0
  24. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/distributed_prediction_writer.py +0 -0
  25. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/early_stopping.py +0 -0
  26. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/ema.py +0 -0
  27. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/finite_checks.py +0 -0
  28. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/gradient_skipping.py +0 -0
  29. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/interval.py +0 -0
  30. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/lr_monitor.py +0 -0
  31. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/metric_validation.py +0 -0
  32. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/norm_logging.py +0 -0
  33. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/print_table.py +0 -0
  34. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/rlp_sanity_checks.py +0 -0
  35. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/shared_parameters.py +0 -0
  36. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/timer.py +0 -0
  37. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/wandb_upload_code.py +0 -0
  38. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/callbacks/wandb_watch.py +0 -0
  39. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/.gitattributes +0 -0
  40. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/__init__.py +0 -0
  41. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/_checkpoint/__init__.py +0 -0
  42. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/_checkpoint/metadata/__init__.py +0 -0
  43. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/_hf_hub/__init__.py +0 -0
  44. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/__init__.py +0 -0
  45. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/actsave/__init__.py +0 -0
  46. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/base/__init__.py +0 -0
  47. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/checkpoint/__init__.py +0 -0
  48. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/checkpoint/_base/__init__.py +0 -0
  49. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/checkpoint/best_checkpoint/__init__.py +0 -0
  50. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/checkpoint/last_checkpoint/__init__.py +0 -0
  51. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/checkpoint/on_exception_checkpoint/__init__.py +0 -0
  52. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/debug_flag/__init__.py +0 -0
  53. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/directory_setup/__init__.py +0 -0
  54. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/distributed_prediction_writer/__init__.py +0 -0
  55. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/early_stopping/__init__.py +0 -0
  56. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/ema/__init__.py +0 -0
  57. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/finite_checks/__init__.py +0 -0
  58. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/gradient_skipping/__init__.py +0 -0
  59. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/log_epoch/__init__.py +0 -0
  60. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/lr_monitor/__init__.py +0 -0
  61. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/metric_validation/__init__.py +0 -0
  62. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/norm_logging/__init__.py +0 -0
  63. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/print_table/__init__.py +0 -0
  64. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/rlp_sanity_checks/__init__.py +0 -0
  65. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/shared_parameters/__init__.py +0 -0
  66. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/timer/__init__.py +0 -0
  67. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/wandb_upload_code/__init__.py +0 -0
  68. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/callbacks/wandb_watch/__init__.py +0 -0
  69. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/loggers/__init__.py +0 -0
  70. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/loggers/actsave/__init__.py +0 -0
  71. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/loggers/base/__init__.py +0 -0
  72. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/loggers/csv/__init__.py +0 -0
  73. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/loggers/tensorboard/__init__.py +0 -0
  74. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/loggers/wandb/__init__.py +0 -0
  75. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/lr_scheduler/__init__.py +0 -0
  76. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/lr_scheduler/base/__init__.py +0 -0
  77. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/lr_scheduler/linear_warmup_cosine/__init__.py +0 -0
  78. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/lr_scheduler/reduce_lr_on_plateau/__init__.py +0 -0
  79. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/metrics/__init__.py +0 -0
  80. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/metrics/_config/__init__.py +0 -0
  81. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/nn/__init__.py +0 -0
  82. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/nn/mlp/__init__.py +0 -0
  83. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/nn/nonlinearity/__init__.py +0 -0
  84. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/nn/rng/__init__.py +0 -0
  85. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/optimizer/__init__.py +0 -0
  86. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/profiler/__init__.py +0 -0
  87. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/profiler/_base/__init__.py +0 -0
  88. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/profiler/advanced/__init__.py +0 -0
  89. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/profiler/pytorch/__init__.py +0 -0
  90. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/profiler/simple/__init__.py +0 -0
  91. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/trainer/__init__.py +0 -0
  92. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/trainer/_config/__init__.py +0 -0
  93. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/trainer/accelerator/__init__.py +0 -0
  94. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/trainer/plugin/__init__.py +0 -0
  95. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/trainer/plugin/base/__init__.py +0 -0
  96. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/trainer/plugin/environment/__init__.py +0 -0
  97. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/trainer/plugin/io/__init__.py +0 -0
  98. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/trainer/plugin/layer_sync/__init__.py +0 -0
  99. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/trainer/plugin/precision/__init__.py +0 -0
  100. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/trainer/strategy/__init__.py +0 -0
  101. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/trainer/trainer/__init__.py +0 -0
  102. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/util/__init__.py +0 -0
  103. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/util/_environment_info/__init__.py +0 -0
  104. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/util/config/__init__.py +0 -0
  105. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/util/config/dtype/__init__.py +0 -0
  106. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/configs/util/config/duration/__init__.py +0 -0
  107. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/data/__init__.py +0 -0
  108. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/data/balanced_batch_sampler.py +0 -0
  109. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/data/datamodule.py +0 -0
  110. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/data/transform.py +0 -0
  111. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/loggers/__init__.py +0 -0
  112. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/loggers/actsave.py +0 -0
  113. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/loggers/base.py +0 -0
  114. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/loggers/csv.py +0 -0
  115. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/loggers/tensorboard.py +0 -0
  116. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/loggers/wandb.py +0 -0
  117. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/lr_scheduler/__init__.py +0 -0
  118. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/lr_scheduler/base.py +0 -0
  119. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/lr_scheduler/linear_warmup_cosine.py +0 -0
  120. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/lr_scheduler/reduce_lr_on_plateau.py +0 -0
  121. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/metrics/__init__.py +0 -0
  122. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/metrics/_config.py +0 -0
  123. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/model/__init__.py +0 -0
  124. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/model/base.py +0 -0
  125. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/model/mixins/callback.py +0 -0
  126. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/model/mixins/debug.py +0 -0
  127. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/model/mixins/logger.py +0 -0
  128. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/nn/__init__.py +0 -0
  129. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/nn/mlp.py +0 -0
  130. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/nn/module_dict.py +0 -0
  131. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/nn/module_list.py +0 -0
  132. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/nn/nonlinearity.py +0 -0
  133. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/nn/rng.py +0 -0
  134. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/optimizer.py +0 -0
  135. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/profiler/__init__.py +0 -0
  136. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/profiler/_base.py +0 -0
  137. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/profiler/advanced.py +0 -0
  138. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/profiler/pytorch.py +0 -0
  139. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/profiler/simple.py +0 -0
  140. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/__init__.py +0 -0
  141. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/_distributed_prediction_result.py +0 -0
  142. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/_log_hparams.py +0 -0
  143. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/_runtime_callback.py +0 -0
  144. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/accelerator.py +0 -0
  145. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/plugin/__init__.py +0 -0
  146. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/plugin/base.py +0 -0
  147. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/plugin/environment.py +0 -0
  148. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/plugin/io.py +0 -0
  149. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/plugin/layer_sync.py +0 -0
  150. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/plugin/precision.py +0 -0
  151. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/signal_connector.py +0 -0
  152. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/strategy.py +0 -0
  153. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/trainer/trainer.py +0 -0
  154. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/util/_environment_info.py +0 -0
  155. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/util/bf16.py +0 -0
  156. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/util/code_upload.py +0 -0
  157. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/util/config/__init__.py +0 -0
  158. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/util/config/dtype.py +0 -0
  159. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/util/config/duration.py +0 -0
  160. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/util/environment.py +0 -0
  161. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/util/path.py +0 -0
  162. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/util/seed.py +0 -0
  163. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/util/slurm.py +0 -0
  164. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/util/typed.py +0 -0
  165. {nshtrainer-1.4.0 → nshtrainer-1.4.1}/src/nshtrainer/util/typing_utils.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: nshtrainer
3
- Version: 1.4.0
3
+ Version: 1.4.1
4
4
  Summary:
5
5
  Author: Nima Shoghi
6
6
  Author-email: nimashoghi@gmail.com
@@ -14,7 +14,7 @@ Provides-Extra: extra
14
14
  Requires-Dist: GitPython ; extra == "extra"
15
15
  Requires-Dist: huggingface-hub ; extra == "extra"
16
16
  Requires-Dist: lightning
17
- Requires-Dist: nshconfig (>0.39)
17
+ Requires-Dist: nshconfig (>=0.43)
18
18
  Requires-Dist: nshrunner ; extra == "extra"
19
19
  Requires-Dist: nshutils ; extra == "extra"
20
20
  Requires-Dist: numpy
@@ -1,13 +1,13 @@
1
1
  [project]
2
2
  name = "nshtrainer"
3
- version = "1.4.0"
3
+ version = "1.4.1"
4
4
  description = ""
5
5
  authors = [{ name = "Nima Shoghi", email = "nimashoghi@gmail.com" }]
6
6
  requires-python = ">=3.10,<4.0"
7
7
  readme = "README.md"
8
8
 
9
9
  dependencies = [
10
- "nshconfig>0.39",
10
+ "nshconfig>=0.43",
11
11
  "psutil",
12
12
  "numpy",
13
13
  "torch",
@@ -0,0 +1,136 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ import math
5
+ from typing import Any, Literal
6
+
7
+ from lightning.pytorch import LightningModule, Trainer
8
+ from lightning.pytorch.callbacks import Callback
9
+ from typing_extensions import final, override
10
+
11
+ from .base import CallbackConfigBase, callback_registry
12
+
13
+ log = logging.getLogger(__name__)
14
+
15
+
16
+ @final
17
+ @callback_registry.register
18
+ class LogEpochCallbackConfig(CallbackConfigBase):
19
+ name: Literal["log_epoch"] = "log_epoch"
20
+
21
+ metric_name: str = "computed_epoch"
22
+ """The name of the metric to log the epoch as."""
23
+
24
+ train: bool = True
25
+ """Whether to log the epoch during training."""
26
+
27
+ val: bool = True
28
+ """Whether to log the epoch during validation."""
29
+
30
+ test: bool = True
31
+ """Whether to log the epoch during testing."""
32
+
33
+ @override
34
+ def create_callbacks(self, trainer_config):
35
+ yield LogEpochCallback(self)
36
+
37
+
38
+ def _worker_fn(
39
+ trainer: Trainer,
40
+ pl_module: LightningModule,
41
+ num_batches_prop: str,
42
+ dataloader_idx: int | None = None,
43
+ *,
44
+ metric_name: str,
45
+ ):
46
+ if trainer.logger is None:
47
+ return
48
+
49
+ # If trainer.num_{training/val/test}_batches is not set or is nan/inf, we cannot calculate the epoch
50
+ if not (num_batches := getattr(trainer, num_batches_prop, None)):
51
+ log.warning(f"Trainer has no valid `{num_batches_prop}`. Cannot log epoch.")
52
+ return
53
+
54
+ # If the trainer has a dataloader_idx, num_batches is a list of num_batches for each dataloader.
55
+ if dataloader_idx is not None:
56
+ assert isinstance(num_batches, list), (
57
+ f"Expected num_batches to be a list, got {type(num_batches)}"
58
+ )
59
+ assert 0 <= dataloader_idx < len(num_batches), (
60
+ f"Expected dataloader_idx to be between 0 and {len(num_batches)}, got {dataloader_idx}"
61
+ )
62
+ num_batches = num_batches[dataloader_idx]
63
+
64
+ if (
65
+ not isinstance(num_batches, (int, float))
66
+ or math.isnan(num_batches)
67
+ or math.isinf(num_batches)
68
+ ):
69
+ log.warning(
70
+ f"Trainer has no valid `{num_batches_prop}` (got {num_batches=}). Cannot log epoch."
71
+ )
72
+ return
73
+
74
+ epoch = pl_module.global_step / num_batches
75
+ pl_module.log(metric_name, epoch, on_step=True, on_epoch=False)
76
+
77
+
78
+ class LogEpochCallback(Callback):
79
+ def __init__(self, config: LogEpochCallbackConfig):
80
+ super().__init__()
81
+
82
+ self.config = config
83
+
84
+ @override
85
+ def on_train_batch_start(
86
+ self, trainer: Trainer, pl_module: LightningModule, batch: Any, batch_idx: int
87
+ ):
88
+ if trainer.logger is None or not self.config.train:
89
+ return
90
+
91
+ _worker_fn(
92
+ trainer,
93
+ pl_module,
94
+ "num_training_batches",
95
+ metric_name=self.config.metric_name,
96
+ )
97
+
98
+ @override
99
+ def on_validation_batch_start(
100
+ self,
101
+ trainer: Trainer,
102
+ pl_module: LightningModule,
103
+ batch: Any,
104
+ batch_idx: int,
105
+ dataloader_idx: int = 0,
106
+ ) -> None:
107
+ if trainer.logger is None or not self.config.val:
108
+ return
109
+
110
+ _worker_fn(
111
+ trainer,
112
+ pl_module,
113
+ "num_val_batches",
114
+ dataloader_idx=dataloader_idx,
115
+ metric_name=self.config.metric_name,
116
+ )
117
+
118
+ @override
119
+ def on_test_batch_start(
120
+ self,
121
+ trainer: Trainer,
122
+ pl_module: LightningModule,
123
+ batch: Any,
124
+ batch_idx: int,
125
+ dataloader_idx: int = 0,
126
+ ) -> None:
127
+ if trainer.logger is None or not self.config.test:
128
+ return
129
+
130
+ _worker_fn(
131
+ trainer,
132
+ pl_module,
133
+ "num_test_batches",
134
+ dataloader_idx=dataloader_idx,
135
+ metric_name=self.config.metric_name,
136
+ )
@@ -419,7 +419,7 @@ class DirectoryConfig(C.Config):
419
419
 
420
420
  class TrainerConfig(C.Config):
421
421
  # region Active Run Configuration
422
- id: Annotated[str, C.AllowMissing()] = C.MISSING
422
+ id: C.AllowMissing[str] = C.MISSING
423
423
  """ID of the run."""
424
424
  name: list[str] = []
425
425
  """Run name in parts. Full name is constructed by joining the parts with spaces."""
@@ -1,49 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import logging
4
- import math
5
- from typing import Any, Literal
6
-
7
- from lightning.pytorch import LightningModule, Trainer
8
- from lightning.pytorch.callbacks import Callback
9
- from typing_extensions import final, override
10
-
11
- from .base import CallbackConfigBase, callback_registry
12
-
13
- log = logging.getLogger(__name__)
14
-
15
-
16
- @final
17
- @callback_registry.register
18
- class LogEpochCallbackConfig(CallbackConfigBase):
19
- name: Literal["log_epoch"] = "log_epoch"
20
-
21
- @override
22
- def create_callbacks(self, trainer_config):
23
- yield LogEpochCallback()
24
-
25
-
26
- class LogEpochCallback(Callback):
27
- def __init__(self, metric_name: str = "computed_epoch"):
28
- super().__init__()
29
-
30
- self.metric_name = metric_name
31
-
32
- @override
33
- def on_train_batch_start(
34
- self, trainer: Trainer, pl_module: LightningModule, batch: Any, batch_idx: int
35
- ):
36
- if trainer.logger is None:
37
- return
38
-
39
- # If trainer.num_training_batches is not set or is nan/inf, we cannot calculate the epoch
40
- if (
41
- not trainer.num_training_batches
42
- or math.isnan(trainer.num_training_batches)
43
- or math.isinf(trainer.num_training_batches)
44
- ):
45
- log.warning("Trainer has no valid num_training_batches. Cannot log epoch.")
46
- return
47
-
48
- epoch = pl_module.global_step / trainer.num_training_batches
49
- pl_module.log(self.metric_name, epoch, on_step=True, on_epoch=False)
Remaining files listed above with "+0 -0" are unchanged between versions.