nshtrainer 0.41.1__py3-none-any.whl → 0.42.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- nshtrainer/config/__init__.py +406 -95
- nshtrainer/config/_checkpoint/loader/__init__.py +55 -13
- nshtrainer/config/_checkpoint/metadata/__init__.py +22 -8
- nshtrainer/config/_directory/__init__.py +21 -9
- nshtrainer/config/_hf_hub/__init__.py +25 -9
- nshtrainer/config/callbacks/__init__.py +114 -29
- nshtrainer/config/callbacks/actsave/__init__.py +20 -8
- nshtrainer/config/callbacks/base/__init__.py +17 -7
- nshtrainer/config/callbacks/checkpoint/__init__.py +62 -13
- nshtrainer/config/callbacks/checkpoint/_base/__init__.py +33 -9
- nshtrainer/config/callbacks/checkpoint/best_checkpoint/__init__.py +40 -10
- nshtrainer/config/callbacks/checkpoint/last_checkpoint/__init__.py +33 -9
- nshtrainer/config/callbacks/checkpoint/on_exception_checkpoint/__init__.py +26 -8
- nshtrainer/config/callbacks/debug_flag/__init__.py +24 -8
- nshtrainer/config/callbacks/directory_setup/__init__.py +26 -8
- nshtrainer/config/callbacks/early_stopping/__init__.py +31 -9
- nshtrainer/config/callbacks/ema/__init__.py +20 -8
- nshtrainer/config/callbacks/finite_checks/__init__.py +26 -8
- nshtrainer/config/callbacks/gradient_skipping/__init__.py +26 -8
- nshtrainer/config/callbacks/norm_logging/__init__.py +24 -8
- nshtrainer/config/callbacks/print_table/__init__.py +26 -8
- nshtrainer/config/callbacks/rlp_sanity_checks/__init__.py +26 -8
- nshtrainer/config/callbacks/shared_parameters/__init__.py +26 -8
- nshtrainer/config/callbacks/throughput_monitor/__init__.py +26 -8
- nshtrainer/config/callbacks/timer/__init__.py +22 -8
- nshtrainer/config/callbacks/wandb_upload_code/__init__.py +26 -8
- nshtrainer/config/callbacks/wandb_watch/__init__.py +24 -8
- nshtrainer/config/loggers/__init__.py +41 -14
- nshtrainer/config/loggers/_base/__init__.py +15 -7
- nshtrainer/config/loggers/csv/__init__.py +18 -8
- nshtrainer/config/loggers/tensorboard/__init__.py +24 -8
- nshtrainer/config/loggers/wandb/__init__.py +31 -11
- nshtrainer/config/lr_scheduler/__init__.py +49 -12
- nshtrainer/config/lr_scheduler/_base/__init__.py +19 -7
- nshtrainer/config/lr_scheduler/linear_warmup_cosine/__init__.py +33 -9
- nshtrainer/config/lr_scheduler/reduce_lr_on_plateau/__init__.py +33 -9
- nshtrainer/config/metrics/__init__.py +16 -7
- nshtrainer/config/metrics/_config/__init__.py +15 -7
- nshtrainer/config/model/__init__.py +31 -12
- nshtrainer/config/model/base/__init__.py +18 -8
- nshtrainer/config/model/config/__init__.py +30 -12
- nshtrainer/config/model/mixins/logger/__init__.py +15 -7
- nshtrainer/config/nn/__init__.py +68 -23
- nshtrainer/config/nn/mlp/__init__.py +21 -9
- nshtrainer/config/nn/nonlinearity/__init__.py +118 -22
- nshtrainer/config/optimizer/__init__.py +21 -9
- nshtrainer/config/profiler/__init__.py +28 -11
- nshtrainer/config/profiler/_base/__init__.py +17 -7
- nshtrainer/config/profiler/advanced/__init__.py +24 -8
- nshtrainer/config/profiler/pytorch/__init__.py +24 -8
- nshtrainer/config/profiler/simple/__init__.py +22 -8
- nshtrainer/config/runner/__init__.py +15 -7
- nshtrainer/config/trainer/_config/__init__.py +144 -30
- nshtrainer/config/trainer/checkpoint_connector/__init__.py +19 -7
- nshtrainer/config/util/_environment_info/__init__.py +87 -17
- nshtrainer/config/util/config/__init__.py +25 -10
- nshtrainer/config/util/config/dtype/__init__.py +15 -7
- nshtrainer/config/util/config/duration/__init__.py +27 -9
- {nshtrainer-0.41.1.dist-info → nshtrainer-0.42.0.dist-info}/METADATA +1 -1
- {nshtrainer-0.41.1.dist-info → nshtrainer-0.42.0.dist-info}/RECORD +61 -61
- {nshtrainer-0.41.1.dist-info → nshtrainer-0.42.0.dist-info}/WHEEL +0 -0
nshtrainer/config/callbacks/early_stopping/__init__.py
@@ -1,14 +1,36 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.callbacks.early_stopping import (
+        CallbackConfigBase as CallbackConfigBase,
+    )
+    from nshtrainer.callbacks.early_stopping import (
+        EarlyStoppingConfig as EarlyStoppingConfig,
+    )
+    from nshtrainer.callbacks.early_stopping import MetricConfig as MetricConfig
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.callbacks.early_stopping"
+            ).CallbackConfigBase
+        if name == "MetricConfig":
+            return importlib.import_module(
+                "nshtrainer.callbacks.early_stopping"
+            ).MetricConfig
+        if name == "EarlyStoppingConfig":
+            return importlib.import_module(
+                "nshtrainer.callbacks.early_stopping"
+            ).EarlyStoppingConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
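Every regenerated config shim in this release follows the shape shown above: an `if TYPE_CHECKING:` block keeps the re-exports visible to static type checkers, while a module-level `__getattr__` (PEP 562) imports the underlying nshtrainer module only when one of the re-exported names is first accessed. A minimal, self-contained sketch of that pattern, using a standard-library target instead of nshtrainer so it runs anywhere (the `lazy_shim` module name and the `JSONDecoder` re-export are illustrative, not part of nshtrainer):

# lazy_shim.py -- illustrative stand-in for one generated config __init__.py
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by type checkers; costs nothing at runtime.
    from json.decoder import JSONDecoder as JSONDecoder
else:

    def __getattr__(name):
        # PEP 562: called when `name` is not already an attribute of this module.
        import importlib

        if name in globals():
            return globals()[name]
        if name == "JSONDecoder":
            # Import the real module lazily and hand back the requested attribute.
            return importlib.import_module("json.decoder").JSONDecoder
        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")

The first attribute access (`import lazy_shim; lazy_shim.JSONDecoder`) is what triggers the `importlib.import_module` call; the generated nshtrainer shims behave the same way for `EarlyStoppingConfig` and the other aliases.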
nshtrainer/config/callbacks/ema/__init__.py
@@ -1,13 +1,25 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.callbacks.ema import CallbackConfigBase as CallbackConfigBase
+    from nshtrainer.callbacks.ema import EMAConfig as EMAConfig
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.callbacks.ema"
+            ).CallbackConfigBase
+        if name == "EMAConfig":
+            return importlib.import_module("nshtrainer.callbacks.ema").EMAConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/callbacks/finite_checks/__init__.py
@@ -1,13 +1,31 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.callbacks.finite_checks import (
+        CallbackConfigBase as CallbackConfigBase,
+    )
+    from nshtrainer.callbacks.finite_checks import (
+        FiniteChecksConfig as FiniteChecksConfig,
+    )
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.callbacks.finite_checks"
+            ).CallbackConfigBase
+        if name == "FiniteChecksConfig":
+            return importlib.import_module(
+                "nshtrainer.callbacks.finite_checks"
+            ).FiniteChecksConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/callbacks/gradient_skipping/__init__.py
@@ -1,13 +1,31 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.callbacks.gradient_skipping import (
+        CallbackConfigBase as CallbackConfigBase,
+    )
+    from nshtrainer.callbacks.gradient_skipping import (
+        GradientSkippingConfig as GradientSkippingConfig,
+    )
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.callbacks.gradient_skipping"
+            ).CallbackConfigBase
+        if name == "GradientSkippingConfig":
+            return importlib.import_module(
+                "nshtrainer.callbacks.gradient_skipping"
+            ).GradientSkippingConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/callbacks/norm_logging/__init__.py
@@ -1,13 +1,29 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.callbacks.norm_logging import (
+        CallbackConfigBase as CallbackConfigBase,
+    )
+    from nshtrainer.callbacks.norm_logging import NormLoggingConfig as NormLoggingConfig
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.callbacks.norm_logging"
+            ).CallbackConfigBase
+        if name == "NormLoggingConfig":
+            return importlib.import_module(
+                "nshtrainer.callbacks.norm_logging"
+            ).NormLoggingConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/callbacks/print_table/__init__.py
@@ -1,13 +1,31 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.callbacks.print_table import (
+        CallbackConfigBase as CallbackConfigBase,
+    )
+    from nshtrainer.callbacks.print_table import (
+        PrintTableMetricsConfig as PrintTableMetricsConfig,
+    )
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.callbacks.print_table"
+            ).CallbackConfigBase
+        if name == "PrintTableMetricsConfig":
+            return importlib.import_module(
+                "nshtrainer.callbacks.print_table"
+            ).PrintTableMetricsConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/callbacks/rlp_sanity_checks/__init__.py
@@ -1,13 +1,31 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.callbacks.rlp_sanity_checks import (
+        CallbackConfigBase as CallbackConfigBase,
+    )
+    from nshtrainer.callbacks.rlp_sanity_checks import (
+        RLPSanityChecksConfig as RLPSanityChecksConfig,
+    )
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.callbacks.rlp_sanity_checks"
+            ).CallbackConfigBase
+        if name == "RLPSanityChecksConfig":
+            return importlib.import_module(
+                "nshtrainer.callbacks.rlp_sanity_checks"
+            ).RLPSanityChecksConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/callbacks/shared_parameters/__init__.py
@@ -1,13 +1,31 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.callbacks.shared_parameters import (
+        CallbackConfigBase as CallbackConfigBase,
+    )
+    from nshtrainer.callbacks.shared_parameters import (
+        SharedParametersConfig as SharedParametersConfig,
+    )
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.callbacks.shared_parameters"
+            ).CallbackConfigBase
+        if name == "SharedParametersConfig":
+            return importlib.import_module(
+                "nshtrainer.callbacks.shared_parameters"
+            ).SharedParametersConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/callbacks/throughput_monitor/__init__.py
@@ -1,13 +1,31 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.callbacks.throughput_monitor import (
+        CallbackConfigBase as CallbackConfigBase,
+    )
+    from nshtrainer.callbacks.throughput_monitor import (
+        ThroughputMonitorConfig as ThroughputMonitorConfig,
+    )
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.callbacks.throughput_monitor"
+            ).CallbackConfigBase
+        if name == "ThroughputMonitorConfig":
+            return importlib.import_module(
+                "nshtrainer.callbacks.throughput_monitor"
+            ).ThroughputMonitorConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/callbacks/timer/__init__.py
@@ -1,13 +1,27 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.callbacks.timer import CallbackConfigBase as CallbackConfigBase
+    from nshtrainer.callbacks.timer import EpochTimerConfig as EpochTimerConfig
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.callbacks.timer"
+            ).CallbackConfigBase
+        if name == "EpochTimerConfig":
+            return importlib.import_module(
+                "nshtrainer.callbacks.timer"
+            ).EpochTimerConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/callbacks/wandb_upload_code/__init__.py
@@ -1,13 +1,31 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.callbacks.wandb_upload_code import (
+        CallbackConfigBase as CallbackConfigBase,
+    )
+    from nshtrainer.callbacks.wandb_upload_code import (
+        WandbUploadCodeConfig as WandbUploadCodeConfig,
+    )
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.callbacks.wandb_upload_code"
+            ).CallbackConfigBase
+        if name == "WandbUploadCodeConfig":
+            return importlib.import_module(
+                "nshtrainer.callbacks.wandb_upload_code"
+            ).WandbUploadCodeConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/callbacks/wandb_watch/__init__.py
@@ -1,13 +1,29 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.callbacks.wandb_watch import (
+        CallbackConfigBase as CallbackConfigBase,
+    )
+    from nshtrainer.callbacks.wandb_watch import WandbWatchConfig as WandbWatchConfig
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.callbacks.wandb_watch"
+            ).CallbackConfigBase
+        if name == "WandbWatchConfig":
+            return importlib.import_module(
+                "nshtrainer.callbacks.wandb_watch"
+            ).WandbWatchConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/loggers/__init__.py
@@ -1,20 +1,47 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
-
-
-from nshtrainer.loggers
-from nshtrainer.loggers
-from nshtrainer.loggers import
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.loggers import BaseLoggerConfig as BaseLoggerConfig
+    from nshtrainer.loggers import CSVLoggerConfig as CSVLoggerConfig
+    from nshtrainer.loggers import LoggerConfig as LoggerConfig
+    from nshtrainer.loggers import TensorboardLoggerConfig as TensorboardLoggerConfig
+    from nshtrainer.loggers import WandbLoggerConfig as WandbLoggerConfig
+    from nshtrainer.loggers.wandb import CallbackConfigBase as CallbackConfigBase
+    from nshtrainer.loggers.wandb import WandbUploadCodeConfig as WandbUploadCodeConfig
+    from nshtrainer.loggers.wandb import WandbWatchConfig as WandbWatchConfig
+else:
+
+    def __getattr__(name):
+        import importlib
+
+        if name in globals():
+            return globals()[name]
+        if name == "BaseLoggerConfig":
+            return importlib.import_module("nshtrainer.loggers").BaseLoggerConfig
+        if name == "TensorboardLoggerConfig":
+            return importlib.import_module("nshtrainer.loggers").TensorboardLoggerConfig
+        if name == "CallbackConfigBase":
+            return importlib.import_module(
+                "nshtrainer.loggers.wandb"
+            ).CallbackConfigBase
+        if name == "WandbLoggerConfig":
+            return importlib.import_module("nshtrainer.loggers").WandbLoggerConfig
+        if name == "WandbUploadCodeConfig":
+            return importlib.import_module(
+                "nshtrainer.loggers.wandb"
+            ).WandbUploadCodeConfig
+        if name == "WandbWatchConfig":
+            return importlib.import_module("nshtrainer.loggers.wandb").WandbWatchConfig
+        if name == "CSVLoggerConfig":
+            return importlib.import_module("nshtrainer.loggers").CSVLoggerConfig
+        if name == "LoggerConfig":
+            return importlib.import_module("nshtrainer.loggers").LoggerConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
-# Type aliases
-from nshtrainer.loggers import LoggerConfig as LoggerConfig
 
 # Submodule exports
 from . import _base as _base
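The loggers shim above also shows the same pattern routing names to two different source modules (`nshtrainer.loggers` and `nshtrainer.loggers.wandb`). The generated code spells this out as a chain of `if name == ...:` checks; a table-driven equivalent is sketched below for comparison (illustrative only, not the generated form; the `_LAZY_TARGETS` mapping is an assumption, and nshtrainer only needs to be installed when an attribute is actually accessed, since nothing is imported at definition time):

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from nshtrainer.loggers import LoggerConfig as LoggerConfig
    from nshtrainer.loggers.wandb import WandbWatchConfig as WandbWatchConfig
else:
    # Maps each re-exported name to the module that actually defines it.
    _LAZY_TARGETS = {
        "LoggerConfig": "nshtrainer.loggers",
        "WandbWatchConfig": "nshtrainer.loggers.wandb",
    }

    def __getattr__(name):
        import importlib

        if name in globals():
            return globals()[name]
        if name in _LAZY_TARGETS:
            return getattr(importlib.import_module(_LAZY_TARGETS[name]), name)
        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")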
nshtrainer/config/loggers/_base/__init__.py
@@ -1,12 +1,20 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.loggers._base import BaseLoggerConfig as BaseLoggerConfig
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "BaseLoggerConfig":
+            return importlib.import_module("nshtrainer.loggers._base").BaseLoggerConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/loggers/csv/__init__.py
@@ -1,13 +1,23 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.loggers.csv import BaseLoggerConfig as BaseLoggerConfig
+    from nshtrainer.loggers.csv import CSVLoggerConfig as CSVLoggerConfig
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "CSVLoggerConfig":
+            return importlib.import_module("nshtrainer.loggers.csv").CSVLoggerConfig
+        if name == "BaseLoggerConfig":
+            return importlib.import_module("nshtrainer.loggers.csv").BaseLoggerConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports
nshtrainer/config/loggers/tensorboard/__init__.py
@@ -1,13 +1,29 @@
-# fmt: off
-# ruff: noqa
-# type: ignore
-
 __codegen__ = True
 
-
-
-
+from typing import TYPE_CHECKING
+
+# Config/alias imports
+
+if TYPE_CHECKING:
+    from nshtrainer.loggers.tensorboard import BaseLoggerConfig as BaseLoggerConfig
+    from nshtrainer.loggers.tensorboard import (
+        TensorboardLoggerConfig as TensorboardLoggerConfig,
+    )
+else:
+
+    def __getattr__(name):
+        import importlib
 
-
+        if name in globals():
+            return globals()[name]
+        if name == "BaseLoggerConfig":
+            return importlib.import_module(
+                "nshtrainer.loggers.tensorboard"
+            ).BaseLoggerConfig
+        if name == "TensorboardLoggerConfig":
+            return importlib.import_module(
+                "nshtrainer.loggers.tensorboard"
+            ).TensorboardLoggerConfig
+        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
 
 # Submodule exports