nshtrainer 1.0.0b42__tar.gz → 1.0.0b44__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/PKG-INFO +1 -1
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/pyproject.toml +1 -1
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/_checkpoint/metadata.py +1 -2
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/_checkpoint/saver.py +4 -1
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/__init__.py +4 -0
- nshtrainer-1.0.0b44/src/nshtrainer/callbacks/metric_validation.py +75 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/__init__.py +4 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/__init__.py +6 -0
- nshtrainer-1.0.0b44/src/nshtrainer/configs/callbacks/metric_validation/__init__.py +21 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/__init__.py +4 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/_config/__init__.py +4 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/_config.py +6 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/trainer.py +2 -2
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/util/path.py +5 -0
- nshtrainer-1.0.0b42/src/nshtrainer/nn/tests/test_mlp.py +0 -120
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/README.md +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/.nshconfig.generated.json +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/_callback.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/_directory.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/_experimental/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/_hf_hub.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/actsave.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/base.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/checkpoint/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/checkpoint/_base.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/checkpoint/best_checkpoint.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/checkpoint/last_checkpoint.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/checkpoint/on_exception_checkpoint.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/debug_flag.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/directory_setup.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/early_stopping.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/ema.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/finite_checks.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/gradient_skipping.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/interval.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/log_epoch.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/lr_monitor.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/norm_logging.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/print_table.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/rlp_sanity_checks.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/shared_parameters.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/timer.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/wandb_upload_code.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/wandb_watch.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/.gitattributes +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/_checkpoint/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/_checkpoint/metadata/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/_directory/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/_hf_hub/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/actsave/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/base/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/checkpoint/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/checkpoint/_base/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/checkpoint/best_checkpoint/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/checkpoint/last_checkpoint/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/checkpoint/on_exception_checkpoint/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/debug_flag/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/directory_setup/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/early_stopping/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/ema/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/finite_checks/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/gradient_skipping/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/log_epoch/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/lr_monitor/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/norm_logging/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/print_table/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/rlp_sanity_checks/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/shared_parameters/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/timer/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/wandb_upload_code/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/wandb_watch/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/loggers/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/loggers/actsave/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/loggers/base/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/loggers/csv/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/loggers/tensorboard/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/loggers/wandb/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/lr_scheduler/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/lr_scheduler/base/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/lr_scheduler/linear_warmup_cosine/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/lr_scheduler/reduce_lr_on_plateau/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/metrics/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/metrics/_config/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/nn/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/nn/mlp/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/nn/nonlinearity/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/optimizer/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/profiler/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/profiler/_base/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/profiler/advanced/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/profiler/pytorch/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/profiler/simple/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/accelerator/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/plugin/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/plugin/base/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/plugin/environment/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/plugin/io/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/plugin/layer_sync/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/plugin/precision/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/strategy/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/trainer/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/util/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/util/_environment_info/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/util/config/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/util/config/dtype/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/util/config/duration/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/data/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/data/balanced_batch_sampler.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/data/datamodule.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/data/transform.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/loggers/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/loggers/actsave.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/loggers/base.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/loggers/csv.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/loggers/tensorboard.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/loggers/wandb.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/lr_scheduler/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/lr_scheduler/base.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/lr_scheduler/linear_warmup_cosine.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/lr_scheduler/reduce_lr_on_plateau.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/metrics/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/metrics/_config.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/model/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/model/base.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/model/mixins/callback.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/model/mixins/debug.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/model/mixins/logger.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/nn/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/nn/mlp.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/nn/module_dict.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/nn/module_list.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/nn/nonlinearity.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/optimizer.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/profiler/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/profiler/_base.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/profiler/advanced.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/profiler/pytorch.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/profiler/simple.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/_runtime_callback.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/accelerator.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/plugin/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/plugin/base.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/plugin/environment.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/plugin/io.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/plugin/layer_sync.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/plugin/precision.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/signal_connector.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/strategy.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/util/_environment_info.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/util/bf16.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/util/config/__init__.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/util/config/dtype.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/util/config/duration.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/util/environment.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/util/seed.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/util/slurm.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/util/typed.py +0 -0
- {nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/util/typing_utils.py +0 -0
{nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/_checkpoint/metadata.py
RENAMED
@@ -3,7 +3,6 @@ from __future__ import annotations
 import copy
 import datetime
 import logging
-from collections.abc import Callable
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, ClassVar
 
@@ -115,7 +114,7 @@ def _metadata_path(checkpoint_path: Path):
     return checkpoint_path.with_suffix(CheckpointMetadata.PATH_SUFFIX)
 
 
-def
+def write_checkpoint_metadata(trainer: Trainer, checkpoint_path: Path):
     metadata_path = _metadata_path(checkpoint_path)
     metadata = _generate_checkpoint_metadata(trainer, checkpoint_path, metadata_path)
 
{nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/_checkpoint/saver.py
RENAMED
@@ -25,7 +25,10 @@ def link_checkpoint(
 
     if remove_existing:
         try:
-            if linkpath.exists():
+            if linkpath.exists(follow_symlinks=False):
+                # follow_symlinks=False is EXTREMELY important here
+                # Otherwise, we've already deleted the file that the symlink
+                # used to point to, so this always returns False
                 if linkpath.is_dir():
                     shutil.rmtree(linkpath)
                 else:
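The follow_symlinks=False fix above addresses a real pathlib pitfall: Path.exists() resolves symlinks by default, so once the file a link points to has been deleted, the dangling link reports as nonexistent and never gets cleaned up. A minimal standalone sketch of the behavior (requires Python 3.12+, where Path.exists() accepts follow_symlinks, and a platform that permits symlink creation; file names are illustrative):

import tempfile
from pathlib import Path

# Reproduce the dangling-symlink pitfall in a scratch directory.
with tempfile.TemporaryDirectory() as tmp:
    target = Path(tmp) / "checkpoint.ckpt"
    link = Path(tmp) / "latest.ckpt"

    target.touch()
    link.symlink_to(target)
    target.unlink()  # the link now dangles

    print(link.exists())                       # False: resolves the broken link
    print(link.exists(follow_symlinks=False))  # True: the link itself still exists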
{nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/callbacks/__init__.py
RENAMED
@@ -40,6 +40,10 @@ from .log_epoch import LogEpochCallback as LogEpochCallback
 from .log_epoch import LogEpochCallbackConfig as LogEpochCallbackConfig
 from .lr_monitor import LearningRateMonitor as LearningRateMonitor
 from .lr_monitor import LearningRateMonitorConfig as LearningRateMonitorConfig
+from .metric_validation import MetricValidationCallback as MetricValidationCallback
+from .metric_validation import (
+    MetricValidationCallbackConfig as MetricValidationCallbackConfig,
+)
 from .norm_logging import NormLoggingCallback as NormLoggingCallback
 from .norm_logging import NormLoggingCallbackConfig as NormLoggingCallbackConfig
 from .print_table import PrintTableMetricsCallback as PrintTableMetricsCallback
nshtrainer-1.0.0b44/src/nshtrainer/callbacks/metric_validation.py
@@ -0,0 +1,75 @@
+from __future__ import annotations
+
+import logging
+from typing import Literal
+
+from lightning.pytorch.utilities.exceptions import MisconfigurationException
+from typing_extensions import final, override, assert_never
+
+from .._callback import NTCallbackBase
+from ..metrics import MetricConfig
+from .base import CallbackConfigBase, callback_registry
+
+log = logging.getLogger(__name__)
+
+
+@final
+@callback_registry.register
+class MetricValidationCallbackConfig(CallbackConfigBase):
+    name: Literal["metric_validation"] = "metric_validation"
+
+    error_behavior: Literal["raise", "warn"] = "raise"
+    """
+    Behavior when an error occurs during validation:
+    - "raise": Raise an error and stop the training.
+    - "warn": Log a warning and continue the training.
+    """
+
+    validate_default_metric: bool = True
+    """Whether to validate the default metric from the root config."""
+
+    metrics: list[MetricConfig] = []
+    """List of metrics to validate."""
+
+    @override
+    def create_callbacks(self, trainer_config):
+        metrics = self.metrics.copy()
+        if (
+            self.validate_default_metric
+            and (default_metric := trainer_config.primary_metric) is not None
+        ):
+            metrics.append(default_metric)
+
+        yield MetricValidationCallback(self, metrics)
+
+
+class MetricValidationCallback(NTCallbackBase):
+    def __init__(
+        self, config: MetricValidationCallbackConfig, metrics: list[MetricConfig]
+    ):
+        super().__init__()
+
+        self.config = config
+        self.metrics = metrics
+
+    @override
+    def on_sanity_check_end(self, trainer, pl_module):
+        super().on_sanity_check_end(trainer, pl_module)
+
+        log.debug("Validating metrics...")
+        logged_metrics = set(trainer.logged_metrics.keys())
+        for metric in self.metrics:
+            if metric.validation_monitor in logged_metrics:
+                continue
+
+            match self.config.error_behavior:
+                case "raise":
+                    raise MisconfigurationException(
+                        f"Metric '{metric.validation_monitor}' not found in logged metrics."
+                    )
+                case "warn":
+                    log.warning(
+                        f"Metric '{metric.validation_monitor}' not found in logged metrics."
+                    )
+                case _:
+                    assert_never(self.config.error_behavior)
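The new callback's core check is small: after Lightning's sanity check runs, every configured monitor key must already appear in trainer.logged_metrics. A self-contained sketch of that logic (plain names of my own, not the nshtrainer API):

import logging

log = logging.getLogger(__name__)

def validate_monitors(
    monitors: list[str],
    logged: set[str],
    error_behavior: str = "raise",  # "raise" or "warn", mirroring the config above
) -> None:
    # Report every expected monitor key that was never actually logged.
    for monitor in monitors:
        if monitor in logged:
            continue
        message = f"Metric '{monitor}' not found in logged metrics."
        if error_behavior == "raise":
            raise ValueError(message)
        log.warning(message)

validate_monitors(["val/loss"], {"val/loss", "val/acc"})             # passes silently
validate_monitors(["val/mae"], {"val/loss"}, error_behavior="warn")  # logs a warning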
{nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/__init__.py
RENAMED
@@ -39,6 +39,9 @@ from nshtrainer.callbacks import (
 )
 from nshtrainer.callbacks import LearningRateMonitorConfig as LearningRateMonitorConfig
 from nshtrainer.callbacks import LogEpochCallbackConfig as LogEpochCallbackConfig
+from nshtrainer.callbacks import (
+    MetricValidationCallbackConfig as MetricValidationCallbackConfig,
+)
 from nshtrainer.callbacks import NormLoggingCallbackConfig as NormLoggingCallbackConfig
 from nshtrainer.callbacks import (
     OnExceptionCheckpointCallbackConfig as OnExceptionCheckpointCallbackConfig,
@@ -287,6 +290,7 @@ __all__ = [
     "MPIEnvironmentPlugin",
     "MPSAcceleratorConfig",
     "MetricConfig",
+    "MetricValidationCallbackConfig",
     "MishNonlinearityConfig",
     "MixedPrecisionPluginConfig",
     "NonlinearityConfig",
{nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/callbacks/__init__.py
RENAMED
@@ -28,6 +28,9 @@ from nshtrainer.callbacks import (
 )
 from nshtrainer.callbacks import LearningRateMonitorConfig as LearningRateMonitorConfig
 from nshtrainer.callbacks import LogEpochCallbackConfig as LogEpochCallbackConfig
+from nshtrainer.callbacks import (
+    MetricValidationCallbackConfig as MetricValidationCallbackConfig,
+)
 from nshtrainer.callbacks import NormLoggingCallbackConfig as NormLoggingCallbackConfig
 from nshtrainer.callbacks import (
     OnExceptionCheckpointCallbackConfig as OnExceptionCheckpointCallbackConfig,
@@ -65,6 +68,7 @@ from . import finite_checks as finite_checks
 from . import gradient_skipping as gradient_skipping
 from . import log_epoch as log_epoch
 from . import lr_monitor as lr_monitor
+from . import metric_validation as metric_validation
 from . import norm_logging as norm_logging
 from . import print_table as print_table
 from . import rlp_sanity_checks as rlp_sanity_checks
@@ -91,6 +95,7 @@ __all__ = [
     "LearningRateMonitorConfig",
     "LogEpochCallbackConfig",
     "MetricConfig",
+    "MetricValidationCallbackConfig",
     "NormLoggingCallbackConfig",
     "OnExceptionCheckpointCallbackConfig",
     "PrintTableMetricsCallbackConfig",
@@ -110,6 +115,7 @@ __all__ = [
     "gradient_skipping",
     "log_epoch",
     "lr_monitor",
+    "metric_validation",
     "norm_logging",
     "print_table",
     "rlp_sanity_checks",
nshtrainer-1.0.0b44/src/nshtrainer/configs/callbacks/metric_validation/__init__.py
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+__codegen__ = True
+
+from nshtrainer.callbacks.metric_validation import (
+    CallbackConfigBase as CallbackConfigBase,
+)
+from nshtrainer.callbacks.metric_validation import MetricConfig as MetricConfig
+from nshtrainer.callbacks.metric_validation import (
+    MetricValidationCallbackConfig as MetricValidationCallbackConfig,
+)
+from nshtrainer.callbacks.metric_validation import (
+    callback_registry as callback_registry,
+)
+
+__all__ = [
+    "CallbackConfigBase",
+    "MetricConfig",
+    "MetricValidationCallbackConfig",
+    "callback_registry",
+]
{nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/__init__.py
RENAMED
@@ -38,6 +38,9 @@ from nshtrainer.trainer._config import LogEpochCallbackConfig as LogEpochCallbac
 from nshtrainer.trainer._config import LoggerConfig as LoggerConfig
 from nshtrainer.trainer._config import LoggerConfigBase as LoggerConfigBase
 from nshtrainer.trainer._config import MetricConfig as MetricConfig
+from nshtrainer.trainer._config import (
+    MetricValidationCallbackConfig as MetricValidationCallbackConfig,
+)
 from nshtrainer.trainer._config import (
     NormLoggingCallbackConfig as NormLoggingCallbackConfig,
 )
@@ -164,6 +167,7 @@ __all__ = [
     "MPIEnvironmentPlugin",
     "MPSAcceleratorConfig",
     "MetricConfig",
+    "MetricValidationCallbackConfig",
     "MixedPrecisionPluginConfig",
     "NormLoggingCallbackConfig",
     "OnExceptionCheckpointCallbackConfig",
{nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/configs/trainer/_config/__init__.py
RENAMED
@@ -34,6 +34,9 @@ from nshtrainer.trainer._config import LogEpochCallbackConfig as LogEpochCallbac
 from nshtrainer.trainer._config import LoggerConfig as LoggerConfig
 from nshtrainer.trainer._config import LoggerConfigBase as LoggerConfigBase
 from nshtrainer.trainer._config import MetricConfig as MetricConfig
+from nshtrainer.trainer._config import (
+    MetricValidationCallbackConfig as MetricValidationCallbackConfig,
+)
 from nshtrainer.trainer._config import (
     NormLoggingCallbackConfig as NormLoggingCallbackConfig,
 )
@@ -77,6 +80,7 @@ __all__ = [
     "LoggerConfig",
     "LoggerConfigBase",
     "MetricConfig",
+    "MetricValidationCallbackConfig",
     "NormLoggingCallbackConfig",
     "OnExceptionCheckpointCallbackConfig",
     "PluginConfig",
{nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/_config.py
RENAMED
@@ -40,6 +40,7 @@ from ..callbacks.base import CallbackConfigBase
 from ..callbacks.debug_flag import DebugFlagCallbackConfig
 from ..callbacks.log_epoch import LogEpochCallbackConfig
 from ..callbacks.lr_monitor import LearningRateMonitorConfig
+from ..callbacks.metric_validation import MetricValidationCallbackConfig
 from ..callbacks.rlp_sanity_checks import RLPSanityChecksCallbackConfig
 from ..callbacks.shared_parameters import SharedParametersCallbackConfig
 from ..loggers import (
@@ -697,6 +698,10 @@ class TrainerConfig(C.Config):
     - The trainer is running in fast_dev_run mode.
     - The trainer is running a sanity check (which happens before starting the training routine).
     """
+    auto_validate_metrics: MetricValidationCallbackConfig | None = (
+        MetricValidationCallbackConfig()
+    )
+    """If enabled, will automatically validate the metrics before starting the training routine."""
 
     lightning_kwargs: LightningTrainerKwargs = LightningTrainerKwargs()
     """
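Because auto_validate_metrics defaults to an enabled MetricValidationCallbackConfig(), existing configs pick up the check automatically. A hedged usage sketch (assuming TrainerConfig's other fields all carry defaults, as is typical for these C.Config classes):

from nshtrainer.callbacks import MetricValidationCallbackConfig
from nshtrainer.trainer._config import TrainerConfig

# Opt out of the new check entirely:
config = TrainerConfig(auto_validate_metrics=None)

# Or keep it, but only warn instead of raising on a missing monitor:
config = TrainerConfig(
    auto_validate_metrics=MetricValidationCallbackConfig(error_behavior="warn")
)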
@@ -768,6 +773,7 @@ class TrainerConfig(C.Config):
         yield self.shared_parameters
         yield self.reduce_lr_on_plateau_sanity_checking
         yield self.auto_set_debug_flag
+        yield self.auto_validate_metrics
         yield from self.callbacks
 
     def _nshtrainer_all_logger_configs(self) -> Iterable[LoggerConfigBase | None]:
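The yield-based hookup above is the whole integration: optional features live as config fields that may be None, and disabled entries are dropped when the trainer collects callback configs. An illustrative sketch of the pattern (hypothetical helper names, not the nshtrainer source):

from collections.abc import Iterable, Iterator

def iter_callback_configs(auto_validate_metrics: object | None,
                          extra_callbacks: list) -> Iterator:
    yield auto_validate_metrics  # None when the feature is disabled
    yield from extra_callbacks

def collect_enabled(configs: Iterable) -> list:
    # Drop disabled (None) entries before instantiating callbacks.
    return [cfg for cfg in configs if cfg is not None]

assert collect_enabled(iter_callback_configs(None, ["cb"])) == ["cb"]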
{nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/trainer/trainer.py
RENAMED
@@ -18,7 +18,7 @@ from lightning.pytorch.trainer.states import TrainerFn
 from lightning.pytorch.utilities.types import _EVALUATE_OUTPUT, _PREDICT_OUTPUT
 from typing_extensions import Never, Unpack, assert_never, deprecated, override
 
-from .._checkpoint.metadata import
+from .._checkpoint.metadata import write_checkpoint_metadata
 from ..callbacks.base import resolve_all_callbacks
 from ..util._environment_info import EnvironmentConfig
 from ..util.bf16 import is_bf16_supported_no_emulation
@@ -478,7 +478,7 @@ class Trainer(LightningTrainer):
         metadata_path = None
         if self.hparams.save_checkpoint_metadata and self.is_global_zero:
             # Generate the metadata and write to disk
-            metadata_path =
+            metadata_path = write_checkpoint_metadata(self, filepath)
 
         # Call the `on_checkpoint_saved` method on all callbacks
         from .. import _callback
{nshtrainer-1.0.0b42 → nshtrainer-1.0.0b44}/src/nshtrainer/util/path.py
RENAMED
@@ -85,11 +85,16 @@ def try_symlink_or_copy(
     target_is_directory: bool = False,
     relative: bool = True,
     remove_existing: bool = True,
+    throw_on_invalid_target: bool = False,
 ):
     """
     Symlinks on Unix, copies on Windows.
     """
 
+    # Check if the target file exists
+    if throw_on_invalid_target and not file_path.exists():
+        raise FileNotFoundError(f"File not found: {file_path}")
+
     # If the link already exists, remove it
     if remove_existing:
         try:
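A hedged usage sketch of the new flag: only file_path (referenced in the body above) and the keyword parameters shown in this hunk come from the source; the second positional argument name is my assumption:

from pathlib import Path
from nshtrainer.util.path import try_symlink_or_copy

# Fail fast instead of silently creating a link to a checkpoint that was
# never written; raises FileNotFoundError when best.ckpt is missing.
try_symlink_or_copy(
    Path("checkpoints/best.ckpt"),    # file_path: the link target
    Path("checkpoints/latest.ckpt"),  # assumed: where the link is created
    throw_on_invalid_target=True,
)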
nshtrainer-1.0.0b42/src/nshtrainer/nn/tests/test_mlp.py
@@ -1,120 +0,0 @@
-from __future__ import annotations
-
-from typing import cast
-
-import pytest
-import torch
-
-from nshtrainer.nn.mlp import MLP, custom_seed_context
-
-
-def test_mlp_seed_reproducibility():
-    """Test that the seed parameter in MLP ensures reproducible weights."""
-
-    # Test dimensions
-    dims = [10, 20, 5]
-
-    # Create two MLPs with the same seed
-    seed1 = 42
-    mlp1 = MLP(dims, activation=torch.nn.ReLU(), seed=seed1)
-    mlp2 = MLP(dims, activation=torch.nn.ReLU(), seed=seed1)
-
-    # Create an MLP with a different seed
-    seed2 = 123
-    mlp3 = MLP(dims, activation=torch.nn.ReLU(), seed=seed2)
-
-    # Check first layer weights
-    layer1_weights1 = cast(torch.Tensor, mlp1[0].weight)
-    layer1_weights2 = cast(torch.Tensor, mlp2[0].weight)
-    layer1_weights3 = cast(torch.Tensor, mlp3[0].weight)
-
-    # Same seed should produce identical weights
-    assert torch.allclose(layer1_weights1, layer1_weights2)
-
-    # Different seeds should produce different weights
-    assert not torch.allclose(layer1_weights1, layer1_weights3)
-
-    # Check second layer weights
-    layer2_weights1 = cast(torch.Tensor, mlp1[2].weight)
-    layer2_weights2 = cast(torch.Tensor, mlp2[2].weight)
-    layer2_weights3 = cast(torch.Tensor, mlp3[2].weight)
-
-    # Same seed should produce identical weights for all layers
-    assert torch.allclose(layer2_weights1, layer2_weights2)
-
-    # Different seeds should produce different weights for all layers
-    assert not torch.allclose(layer2_weights1, layer2_weights3)
-
-    # Test that not providing a seed gives different results each time
-    mlp4 = MLP(dims, activation=torch.nn.ReLU(), seed=None)
-    mlp5 = MLP(dims, activation=torch.nn.ReLU(), seed=None)
-
-    # Without seeds, weights should be different
-    assert not torch.allclose(
-        cast(torch.Tensor, mlp4[0].weight), cast(torch.Tensor, mlp5[0].weight)
-    )
-
-
-def test_custom_seed_context():
-    """Test that custom_seed_context properly controls random number generation."""
-
-    # Test that the same seed produces the same random numbers
-    with custom_seed_context(42):
-        tensor1 = torch.randn(10)
-
-    with custom_seed_context(42):
-        tensor2 = torch.randn(10)
-
-    # Same seed should produce identical random tensors
-    assert torch.allclose(tensor1, tensor2)
-
-    # Test that different seeds produce different random numbers
-    with custom_seed_context(123):
-        tensor3 = torch.randn(10)
-
-    # Different seeds should produce different random tensors
-    assert not torch.allclose(tensor1, tensor3)
-
-
-def test_custom_seed_context_preserves_state():
-    """Test that custom_seed_context preserves the original random state."""
-
-    # Set a known seed for the test
-    original_seed = 789
-    torch.manual_seed(original_seed)
-
-    # Generate a tensor with the original seed
-    original_tensor = torch.randn(10)
-
-    # Use a different seed in the context
-    with custom_seed_context(42):
-        # This should use the temporary seed
-        context_tensor = torch.randn(10)
-
-    # After exiting the context, we should be back to the original seed state
-    # Reset the generator to get the same sequence again
-    torch.manual_seed(original_seed)
-    expected_tensor = torch.randn(10)
-
-    # The tensor generated after the context should match what we would get
-    # if we had just set the original seed again
-    assert torch.allclose(original_tensor, expected_tensor)
-
-    # And it should be different from the tensor generated inside the context
-    assert not torch.allclose(original_tensor, context_tensor)
-
-
-def test_custom_seed_context_with_none():
-    """Test that custom_seed_context with None seed doesn't affect randomization."""
-
-    # Set a known seed
-    torch.manual_seed(555)
-    expected_tensor = torch.randn(10)
-
-    # Reset and use None seed in context
-    torch.manual_seed(555)
-    with custom_seed_context(None):
-        actual_tensor = torch.randn(10)
-
-    # With None seed, the context should not affect the random state
-    assert torch.allclose(expected_tensor, actual_tensor)