kostyl-toolkit 0.1.7__tar.gz → 0.1.9__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/PKG-INFO +1 -1
  2. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/clearml/config_mixin.py +3 -10
  3. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/clearml/pulling_utils.py +5 -2
  4. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/configs/__init__.py +4 -2
  5. kostyl_toolkit-0.1.7/kostyl/ml_core/configs/base.py → kostyl_toolkit-0.1.9/kostyl/ml_core/configs/base_model.py +6 -0
  6. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/configs/training_settings.py +7 -3
  7. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/extenstions/custom_module.py +4 -1
  8. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/pyproject.toml +1 -1
  9. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/README.md +0 -0
  10. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/__init__.py +0 -0
  11. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/__init__.py +0 -0
  12. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/clearml/__init__.py +0 -0
  13. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/clearml/dataset_utils.py +0 -0
  14. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/clearml/logging_utils.py +0 -0
  15. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/configs/hyperparams.py +0 -0
  16. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/dist_utils.py +0 -0
  17. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/__init__.py +0 -0
  18. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/callbacks/__init__.py +0 -0
  19. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/callbacks/checkpoint.py +0 -0
  20. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/callbacks/early_stopping.py +0 -0
  21. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/callbacks/registry_uploading.py +0 -0
  22. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/extenstions/__init__.py +0 -0
  23. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/extenstions/pretrained_model.py +0 -0
  24. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/loggers/__init__.py +0 -0
  25. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/loggers/tb_logger.py +0 -0
  26. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/steps_estimation.py +0 -0
  27. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/metrics_formatting.py +0 -0
  28. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/params_groups.py +0 -0
  29. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/schedulers/__init__.py +0 -0
  30. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/schedulers/base.py +0 -0
  31. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/schedulers/composite.py +0 -0
  32. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/schedulers/cosine.py +0 -0
  33. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/utils/__init__.py +0 -0
  34. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/utils/dict_manipulations.py +0 -0
  35. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/utils/fs.py +0 -0
  36. {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/utils/logging.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: kostyl-toolkit
3
- Version: 0.1.7
3
+ Version: 0.1.9
4
4
  Summary: Kickass Orchestration System for Training, Yielding & Logging
5
5
  Requires-Dist: case-converter>=1.2.0
6
6
  Requires-Dist: loguru>=0.7.3
@@ -4,18 +4,17 @@ from typing import TypeVar
4
4
  import clearml
5
5
  from caseconverter import pascalcase
6
6
  from caseconverter import snakecase
7
- from pydantic import BaseModel as PydanticBaseModel
8
7
 
9
- from kostyl.ml_core.configs.base import ConfigLoadingMixin
8
+ from kostyl.ml_core.configs.base_model import KostylBaseModel
10
9
  from kostyl.utils.dict_manipulations import convert_to_flat_dict
11
10
  from kostyl.utils.dict_manipulations import flattened_dict_to_nested
12
11
  from kostyl.utils.fs import load_config
13
12
 
14
13
 
15
- TModel = TypeVar("TModel", bound="_ClearMLBaseModel")
14
+ TModel = TypeVar("TModel", bound="KostylBaseModel")
16
15
 
17
16
 
18
- class ClearMLConfigMixin(ConfigLoadingMixin):
17
+ class ClearMLConfigMixin:
19
18
  """Pydantic mixin class providing ClearML configuration loading and syncing functionality."""
20
19
 
21
20
  @classmethod
@@ -90,9 +89,3 @@ class ClearMLConfigMixin(ConfigLoadingMixin):
90
89
 
91
90
  model = cls.from_dict(state_dict=config)
92
91
  return model
93
-
94
-
95
- class _ClearMLBaseModel(PydanticBaseModel, ClearMLConfigMixin):
96
- """A Pydantic model class with ClearML configuration loading and syncing functionality."""
97
-
98
- pass
@@ -2,6 +2,7 @@ from pathlib import Path
2
2
 
3
3
  from clearml import InputModel
4
4
  from clearml import Task
5
+ from transformers import AutoModel
5
6
  from transformers import AutoTokenizer
6
7
  from transformers import PreTrainedModel
7
8
  from transformers import PreTrainedTokenizerBase
@@ -39,12 +40,14 @@ def get_tokenizer_from_clearml(
39
40
  return tokenizer
40
41
 
41
42
 
42
- def get_model_from_clearml[TModel: PreTrainedModel | LightningCheckpointLoaderMixin](
43
+ def get_model_from_clearml[
44
+ TModel: PreTrainedModel | LightningCheckpointLoaderMixin | AutoModel
45
+ ](
43
46
  model_id: str,
44
47
  model: type[TModel],
45
48
  task: Task | None = None,
46
49
  ignore_remote_overrides: bool = True,
47
- ) -> TModel:
50
+ ) -> PreTrainedModel:
48
51
  """
49
52
  Retrieve a pretrained model from ClearML and instantiate it using the appropriate loader.
50
53
 
@@ -1,4 +1,4 @@
1
- from .base import ConfigLoadingMixin
1
+ from .base_model import KostylBaseModel
2
2
  from .hyperparams import HyperparamsConfig
3
3
  from .hyperparams import Lr
4
4
  from .hyperparams import Optimizer
@@ -9,6 +9,7 @@ from .training_settings import DataConfig
9
9
  from .training_settings import DDPStrategyConfig
10
10
  from .training_settings import EarlyStoppingConfig
11
11
  from .training_settings import FSDP1StrategyConfig
12
+ from .training_settings import LightningTrainerParameters
12
13
  from .training_settings import SingleDeviceStrategyConfig
13
14
  from .training_settings import TrainingSettings
14
15
 
@@ -16,12 +17,13 @@ from .training_settings import TrainingSettings
16
17
  __all__ = [
17
18
  "CheckpointConfig",
18
19
  "ClearMLTrainingSettings",
19
- "ConfigLoadingMixin",
20
20
  "DDPStrategyConfig",
21
21
  "DataConfig",
22
22
  "EarlyStoppingConfig",
23
23
  "FSDP1StrategyConfig",
24
24
  "HyperparamsConfig",
25
+ "KostylBaseModel",
26
+ "LightningTrainerParameters",
25
27
  "Lr",
26
28
  "Optimizer",
27
29
  "SingleDeviceStrategyConfig",
@@ -52,3 +52,9 @@ class ConfigLoadingMixin:
52
52
  """
53
53
  instance = cls.model_validate(state_dict)
54
54
  return instance
55
+
56
+
57
+ class KostylBaseModel(PydanticBaseModel, ConfigLoadingMixin):
58
+ """A Pydantic model class with basic configuration loading functionality."""
59
+
60
+ pass
@@ -6,7 +6,7 @@ from pydantic import Field
6
6
  from kostyl.ml_core.clearml.config_mixin import ClearMLConfigMixin
7
7
  from kostyl.utils.logging import setup_logger
8
8
 
9
- from .base import ConfigLoadingMixin
9
+ from .base_model import KostylBaseModel
10
10
 
11
11
 
12
12
  logger = setup_logger(fmt="only_message")
@@ -63,6 +63,10 @@ class LightningTrainerParameters(BaseModel):
63
63
  precision: PRECISION
64
64
  log_every_n_steps: int = Field(default=50, ge=1)
65
65
  accumulate_grad_batches: int = Field(default=1, ge=1)
66
+ limit_train_batches: int | float | None = None
67
+ limit_val_batches: int | float | None = None
68
+ limit_test_batches: int | float | None = None
69
+ limit_predict_batches: int | float | None = None
66
70
 
67
71
 
68
72
  class EarlyStoppingConfig(BaseModel):
@@ -92,7 +96,7 @@ class DataConfig(BaseModel):
92
96
  data_columns: list[str]
93
97
 
94
98
 
95
- class TrainingSettings(BaseModel, ConfigLoadingMixin):
99
+ class TrainingSettings(KostylBaseModel):
96
100
  """Training parameters configuration."""
97
101
 
98
102
  trainer: LightningTrainerParameters
@@ -102,8 +106,8 @@ class TrainingSettings(BaseModel, ConfigLoadingMixin):
102
106
 
103
107
 
104
108
  class ClearMLTrainingSettings(
105
- ClearMLConfigMixin,
106
109
  TrainingSettings,
110
+ ClearMLConfigMixin,
107
111
  ):
108
112
  """Training parameters configuration with ClearML features support (config syncing, model identifiers tracking and etc)."""
109
113
 
@@ -21,7 +21,7 @@ from kostyl.ml_core.schedulers.base import BaseScheduler
21
21
  from kostyl.utils import setup_logger
22
22
 
23
23
 
24
- logger = setup_logger()
24
+ logger = setup_logger(fmt="only_message")
25
25
 
26
26
 
27
27
  class KostylLightningModule(L.LightningModule):
@@ -93,6 +93,9 @@ class KostylLightningModule(L.LightningModule):
93
93
  def on_before_optimizer_step(self, optimizer) -> None:
94
94
  if self.model is None:
95
95
  raise ValueError("Model must be configured before optimizer step.")
96
+ if not hasattr(self, "hyperparams"):
97
+ logger.warning_once("cannot clip gradients, hyperparams attr missing")
98
+ return
96
99
  if self.hyperparams.grad_clip_val is None:
97
100
  return
98
101
 
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "kostyl-toolkit"
3
- version = "0.1.7"
3
+ version = "0.1.9"
4
4
  description = "Kickass Orchestration System for Training, Yielding & Logging "
5
5
  readme = "README.md"
6
6
  requires-python = ">=3.12"
File without changes