nshtrainer 0.4.0__tar.gz → 0.4.2__tar.gz

This diff compares the contents of two publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
Files changed (65)
  1. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/PKG-INFO +2 -2
  2. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/pyproject.toml +2 -2
  3. nshtrainer-0.4.2/src/nshtrainer/ll/__init__.py +42 -0
  4. nshtrainer-0.4.2/src/nshtrainer/runner.py +97 -0
  5. nshtrainer-0.4.0/src/nshtrainer/runner.py +0 -31
  6. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/README.md +0 -0
  7. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/__init__.py +0 -0
  8. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/_experimental/__init__.py +0 -0
  9. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/_experimental/flops/__init__.py +0 -0
  10. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/_experimental/flops/flop_counter.py +0 -0
  11. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/_experimental/flops/module_tracker.py +0 -0
  12. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/_snoop.py +0 -0
  13. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/actsave/__init__.py +0 -0
  14. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/actsave/_callback.py +0 -0
  15. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/__init__.py +0 -0
  16. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/_throughput_monitor_callback.py +0 -0
  17. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/base.py +0 -0
  18. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/early_stopping.py +0 -0
  19. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/ema.py +0 -0
  20. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/finite_checks.py +0 -0
  21. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/gradient_skipping.py +0 -0
  22. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/interval.py +0 -0
  23. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/latest_epoch_checkpoint.py +0 -0
  24. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/log_epoch.py +0 -0
  25. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/norm_logging.py +0 -0
  26. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/on_exception_checkpoint.py +0 -0
  27. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/print_table.py +0 -0
  28. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/throughput_monitor.py +0 -0
  29. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/timer.py +0 -0
  30. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/callbacks/wandb_watch.py +0 -0
  31. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/config.py +0 -0
  32. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/data/__init__.py +0 -0
  33. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/data/balanced_batch_sampler.py +0 -0
  34. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/data/transform.py +0 -0
  35. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/lr_scheduler/__init__.py +0 -0
  36. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/lr_scheduler/_base.py +0 -0
  37. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/lr_scheduler/linear_warmup_cosine.py +0 -0
  38. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/lr_scheduler/reduce_lr_on_plateau.py +0 -0
  39. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/model/__init__.py +0 -0
  40. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/model/base.py +0 -0
  41. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/model/config.py +0 -0
  42. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/model/modules/callback.py +0 -0
  43. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/model/modules/debug.py +0 -0
  44. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/model/modules/distributed.py +0 -0
  45. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/model/modules/logger.py +0 -0
  46. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/model/modules/profiler.py +0 -0
  47. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/model/modules/rlp_sanity_checks.py +0 -0
  48. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/model/modules/shared_parameters.py +0 -0
  49. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/nn/__init__.py +0 -0
  50. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/nn/mlp.py +0 -0
  51. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/nn/module_dict.py +0 -0
  52. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/nn/module_list.py +0 -0
  53. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/nn/nonlinearity.py +0 -0
  54. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/optimizer.py +0 -0
  55. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/scripts/check_env.py +0 -0
  56. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/scripts/find_packages.py +0 -0
  57. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/trainer/__init__.py +0 -0
  58. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/trainer/signal_connector.py +0 -0
  59. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/trainer/trainer.py +0 -0
  60. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/typecheck.py +0 -0
  61. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/util/environment.py +0 -0
  62. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/util/seed.py +0 -0
  63. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/util/slurm.py +0 -0
  64. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/util/typed.py +0 -0
  65. {nshtrainer-0.4.0 → nshtrainer-0.4.2}/src/nshtrainer/util/typing_utils.py +0 -0
{nshtrainer-0.4.0 → nshtrainer-0.4.2}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: nshtrainer
-Version: 0.4.0
+Version: 0.4.2
 Summary:
 Author: Nima Shoghi
 Author-email: nimashoghi@gmail.com
@@ -13,7 +13,7 @@ Requires-Dist: lightning
 Requires-Dist: lovely-numpy (>=0.2.13,<0.3.0)
 Requires-Dist: lovely-tensors (>=0.1.16,<0.2.0)
 Requires-Dist: nshconfig (>=0.2.0,<0.3.0)
-Requires-Dist: nshrunner (>=0.5.4,<0.6.0)
+Requires-Dist: nshrunner (>=0.5.5,<0.6.0)
 Requires-Dist: nshutils (>=0.3.0,<0.4.0)
 Requires-Dist: numpy
 Requires-Dist: pytorch-lightning

{nshtrainer-0.4.0 → nshtrainer-0.4.2}/pyproject.toml
@@ -1,13 +1,13 @@
 [tool.poetry]
 name = "nshtrainer"
-version = "0.4.0"
+version = "0.4.2"
 description = ""
 authors = ["Nima Shoghi <nimashoghi@gmail.com>"]
 readme = "README.md"
 
 [tool.poetry.dependencies]
 python = "^3.10"
-nshrunner = "^0.5.4"
+nshrunner = "^0.5.5"
 nshconfig = "^0.2.0"
 torch = "*"
 typing-extensions = "*"
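
The pyproject.toml and PKG-INFO changes above are the same dependency bump stated in two notations: Poetry's caret constraint nshrunner = "^0.5.5" is rendered as the PEP 440 specifier nshrunner (>=0.5.5,<0.6.0) in the published metadata. A quick check of that range using the packaging library (used here only for illustration; it is not a dependency of this project):

# The caret constraint "^0.5.5" publishes as >=0.5.5,<0.6.0 in the metadata.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=0.5.5,<0.6.0")
assert Version("0.5.5") in spec      # minimum allowed nshrunner release
assert Version("0.5.9") in spec      # later patch releases still satisfy it
assert Version("0.6.0") not in spec  # the next minor series is excluded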

nshtrainer-0.4.2/src/nshtrainer/ll/__init__.py
@@ -0,0 +1,42 @@
+import importlib
+import sys
+from types import ModuleType
+
+# The name of your new package
+NEW_PACKAGE = "nshtrainer"
+
+# Import the new package
+new_package = importlib.import_module(NEW_PACKAGE)
+
+
+# Create a custom module class that inherits from ModuleType
+class ProxyModule(ModuleType):
+    def __getattr__(self, name):
+        return getattr(new_package, name)
+
+    def __dir__(self):
+        return dir(new_package)
+
+
+# Create a new module instance
+old_module = ProxyModule(__name__)
+
+# Copy attributes from new_package to old_module
+for attr in dir(new_package):
+    if not attr.startswith("__"):
+        setattr(old_module, attr, getattr(new_package, attr))
+
+# Replace the module in sys.modules
+sys.modules[__name__] = old_module
+
+
+# Handle submodule imports
+class SubmoduleProxy:
+    def __getattr__(self, name):
+        return importlib.import_module(f"{NEW_PACKAGE}.{name}")
+
+
+# Add submodule handling to the proxy module
+old_module.__class__ = type(
+    "ProxyModuleWithSubmodules", (ProxyModule, SubmoduleProxy), {}
+)

nshtrainer-0.4.2/src/nshtrainer/runner.py
@@ -0,0 +1,97 @@
+import copy
+import functools
+from collections.abc import Callable, Mapping, Sequence
+from typing import Generic
+
+from nshrunner import RunInfo
+from nshrunner import Runner as _Runner
+from nshrunner._runner import SnapshotArgType
+from typing_extensions import TypeVar, TypeVarTuple, Unpack, override
+
+from .model.config import BaseConfig
+
+TConfig = TypeVar("TConfig", bound=BaseConfig, infer_variance=True)
+TArguments = TypeVarTuple("TArguments", default=Unpack[tuple[()]])
+TReturn = TypeVar("TReturn", infer_variance=True)
+
+
+class Runner(
+    _Runner[TReturn, TConfig, Unpack[TArguments]],
+    Generic[TReturn, TConfig, Unpack[TArguments]],
+):
+    @override
+    @classmethod
+    def default_validate_fn(cls, config: TConfig, *args: Unpack[TArguments]) -> None:
+        super().default_validate_fn(config, *args)
+
+    @override
+    @classmethod
+    def default_info_fn(cls, config: TConfig, *args: Unpack[TArguments]) -> RunInfo:
+        run_info = super().default_info_fn(config, *args)
+        return {
+            **run_info,
+            "id": config.id,
+            "base_dir": config.directory.project_root,
+        }
+
+    def _fast_dev_run_transform(
+        self,
+        config: TConfig,
+        *args: Unpack[TArguments],
+        n_batches: int,
+    ):
+        config = copy.deepcopy(config)
+        config.trainer.fast_dev_run = n_batches
+        return (config, *args)
+
+    def fast_dev_run(
+        self,
+        runs: Sequence[tuple[TConfig, Unpack[TArguments]]],
+        n_batches: int = 1,
+        *,
+        env: Mapping[str, str] | None = None,
+        transforms: list[
+            Callable[[TConfig, Unpack[TArguments]], tuple[TConfig, Unpack[TArguments]]]
+        ]
+        | None = None,
+    ):
+        transforms = transforms or []
+        transforms.append(
+            functools.partial(self._fast_dev_run_transform, n_batches=n_batches)
+        )
+        return self.local(runs, env=env, transforms=transforms)
+
+    def fast_dev_run_session(
+        self,
+        runs: Sequence[tuple[TConfig, Unpack[TArguments]]],
+        n_batches: int = 1,
+        *,
+        snapshot: SnapshotArgType,
+        setup_commands: Sequence[str] | None = None,
+        env: Mapping[str, str] | None = None,
+        transforms: list[
+            Callable[[TConfig, Unpack[TArguments]], tuple[TConfig, Unpack[TArguments]]]
+        ]
+        | None = None,
+        activate_venv: bool = True,
+        session_name: str = "nshrunner",
+        attach: bool = True,
+        print_command: bool = True,
+        pause_before_exit: bool = False,
+    ):
+        transforms = transforms or []
+        transforms.append(
+            functools.partial(self._fast_dev_run_transform, n_batches=n_batches)
+        )
+        return self.session(
+            runs,
+            snapshot=snapshot,
+            setup_commands=setup_commands,
+            env=env,
+            transforms=transforms,
+            activate_venv=activate_venv,
+            session_name=session_name,
+            attach=attach,
+            print_command=print_command,
+            pause_before_exit=pause_before_exit,
+        )
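
The rewritten runner.py keeps the 0.4.0 Runner overrides (default_validate_fn, default_info_fn) and adds two fast-dev-run helpers: each deep-copies every config, sets config.trainer.fast_dev_run to the requested batch count via an appended transform, and then dispatches through the inherited local() or session() entry points. A hedged usage sketch; MyConfig, train(), and the Runner(train) constructor call are assumptions for illustration (nshrunner defines the real constructor), and only fast_dev_run's own signature comes from the code above:

# Illustration only: MyConfig and train() are user code, and Runner(train)
# assumes the nshrunner constructor takes the run function.
from nshtrainer.runner import Runner
from my_project.config import MyConfig  # hypothetical BaseConfig subclass

def train(config: MyConfig) -> None:
    ...  # build and fit the model from `config`

runner = Runner(train)
runs = [(MyConfig(),)]  # Sequence[tuple[TConfig, *TArguments]]; no extra args here

# Each config is deep-copied, `trainer.fast_dev_run` is set to 2, and the runs
# are executed locally through Runner.local().
runner.fast_dev_run(runs, n_batches=2)

fast_dev_run_session() applies the same transform but submits through Runner.session(), so the same smoke test can run inside a named session managed by nshrunner (session_name defaults to "nshrunner").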

nshtrainer-0.4.0/src/nshtrainer/runner.py
@@ -1,31 +0,0 @@
-from typing import Generic
-
-from nshrunner import RunInfo
-from nshrunner import Runner as _Runner
-from typing_extensions import TypeVar, TypeVarTuple, Unpack, override
-
-from .model.config import BaseConfig
-
-TConfig = TypeVar("TConfig", bound=BaseConfig, infer_variance=True)
-TArguments = TypeVarTuple("TArguments", default=Unpack[tuple[()]])
-TReturn = TypeVar("TReturn", infer_variance=True)
-
-
-class Runner(
-    _Runner[TReturn, TConfig, Unpack[TArguments]],
-    Generic[TReturn, TConfig, Unpack[TArguments]],
-):
-    @override
-    @classmethod
-    def default_validate_fn(cls, config: TConfig, *args: Unpack[TArguments]) -> None:
-        super().default_validate_fn(config, *args)
-
-    @override
-    @classmethod
-    def default_info_fn(cls, config: TConfig, *args: Unpack[TArguments]) -> RunInfo:
-        run_info = super().default_info_fn(config, *args)
-        return {
-            **run_info,
-            "id": config.id,
-            "base_dir": config.directory.project_root,
-        }