kostyl-toolkit 0.1.7__tar.gz → 0.1.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/PKG-INFO +1 -1
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/clearml/config_mixin.py +3 -10
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/clearml/pulling_utils.py +5 -2
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/configs/__init__.py +4 -2
- kostyl_toolkit-0.1.7/kostyl/ml_core/configs/base.py → kostyl_toolkit-0.1.9/kostyl/ml_core/configs/base_model.py +6 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/configs/training_settings.py +7 -3
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/extenstions/custom_module.py +4 -1
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/pyproject.toml +1 -1
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/README.md +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/__init__.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/__init__.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/clearml/__init__.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/clearml/dataset_utils.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/clearml/logging_utils.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/configs/hyperparams.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/dist_utils.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/__init__.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/callbacks/__init__.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/callbacks/checkpoint.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/callbacks/early_stopping.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/callbacks/registry_uploading.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/extenstions/__init__.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/extenstions/pretrained_model.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/loggers/__init__.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/loggers/tb_logger.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/steps_estimation.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/metrics_formatting.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/params_groups.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/schedulers/__init__.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/schedulers/base.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/schedulers/composite.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/schedulers/cosine.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/utils/__init__.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/utils/dict_manipulations.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/utils/fs.py +0 -0
- {kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/utils/logging.py +0 -0
{kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/clearml/config_mixin.py
RENAMED

@@ -4,18 +4,17 @@ from typing import TypeVar
 import clearml
 from caseconverter import pascalcase
 from caseconverter import snakecase
-from pydantic import BaseModel as PydanticBaseModel

-from kostyl.ml_core.configs.
+from kostyl.ml_core.configs.base_model import KostylBaseModel
 from kostyl.utils.dict_manipulations import convert_to_flat_dict
 from kostyl.utils.dict_manipulations import flattened_dict_to_nested
 from kostyl.utils.fs import load_config


-TModel = TypeVar("TModel", bound="
+TModel = TypeVar("TModel", bound="KostylBaseModel")


-class ClearMLConfigMixin
+class ClearMLConfigMixin:
     """Pydantic mixin class providing ClearML configuration loading and syncing functionality."""

     @classmethod

@@ -90,9 +89,3 @@ class ClearMLConfigMixin(ConfigLoadingMixin):

         model = cls.from_dict(state_dict=config)
         return model
-
-
-class _ClearMLBaseModel(PydanticBaseModel, ClearMLConfigMixin):
-    """A Pydantic model class with ClearML configuration loading and syncing functionality."""
-
-    pass
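A minimal sketch of the composition pattern this change implies: configuration classes now derive from KostylBaseModel, with ClearML syncing layered on via ClearMLConfigMixin. The MyConfig class and its fields below are hypothetical, and only the from_dict(state_dict=...) call is visible in the hunk above, so where that method is defined (KostylBaseModel vs. the mixin) is an assumption.

from kostyl.ml_core.clearml.config_mixin import ClearMLConfigMixin
from kostyl.ml_core.configs import KostylBaseModel


class MyConfig(KostylBaseModel, ClearMLConfigMixin):
    # Hypothetical user config; field names are illustrative.
    lr: float = 1e-3
    batch_size: int = 32


# Mirrors the cls.from_dict(state_dict=...) call seen in the hunk above.
config = MyConfig.from_dict(state_dict={"lr": 3e-4, "batch_size": 64})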
{kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/clearml/pulling_utils.py
RENAMED

@@ -2,6 +2,7 @@ from pathlib import Path

 from clearml import InputModel
 from clearml import Task
+from transformers import AutoModel
 from transformers import AutoTokenizer
 from transformers import PreTrainedModel
 from transformers import PreTrainedTokenizerBase

@@ -39,12 +40,14 @@ def get_tokenizer_from_clearml(
     return tokenizer


-def get_model_from_clearml[
+def get_model_from_clearml[
+    TModel: PreTrainedModel | LightningCheckpointLoaderMixin | AutoModel
+](
     model_id: str,
     model: type[TModel],
     task: Task | None = None,
     ignore_remote_overrides: bool = True,
-) ->
+) -> PreTrainedModel:
     """
     Retrieve a pretrained model from ClearML and instantiate it using the appropriate loader.

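A hedged usage sketch of the widened get_model_from_clearml signature: the parameter names come from the hunk above, while the project/task names, the placeholder model_id, and the choice of BertModel are illustrative assumptions.

from clearml import Task
from transformers import BertModel

from kostyl.ml_core.clearml.pulling_utils import get_model_from_clearml

task = Task.init(project_name="demo", task_name="pull-model")  # assumed names
model = get_model_from_clearml(
    model_id="<clearml-model-id>",  # placeholder, not a real ID
    model=BertModel,                # any PreTrainedModel subclass satisfies the new bound
    task=task,
    ignore_remote_overrides=True,
)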
{kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/configs/__init__.py
RENAMED

@@ -1,4 +1,4 @@
-from .
+from .base_model import KostylBaseModel
 from .hyperparams import HyperparamsConfig
 from .hyperparams import Lr
 from .hyperparams import Optimizer

@@ -9,6 +9,7 @@ from .training_settings import DataConfig
 from .training_settings import DDPStrategyConfig
 from .training_settings import EarlyStoppingConfig
 from .training_settings import FSDP1StrategyConfig
+from .training_settings import LightningTrainerParameters
 from .training_settings import SingleDeviceStrategyConfig
 from .training_settings import TrainingSettings

@@ -16,12 +17,13 @@ from .training_settings import TrainingSettings
 __all__ = [
     "CheckpointConfig",
     "ClearMLTrainingSettings",
-    "ConfigLoadingMixin",
     "DDPStrategyConfig",
     "DataConfig",
     "EarlyStoppingConfig",
     "FSDP1StrategyConfig",
     "HyperparamsConfig",
+    "KostylBaseModel",
+    "LightningTrainerParameters",
     "Lr",
     "Optimizer",
     "SingleDeviceStrategyConfig",
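The two new re-exports can be imported directly from the configs package, for example:

from kostyl.ml_core.configs import KostylBaseModel
from kostyl.ml_core.configs import LightningTrainerParameters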
{kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/configs/training_settings.py
RENAMED

@@ -6,7 +6,7 @@ from pydantic import Field
 from kostyl.ml_core.clearml.config_mixin import ClearMLConfigMixin
 from kostyl.utils.logging import setup_logger

-from .
+from .base_model import KostylBaseModel


 logger = setup_logger(fmt="only_message")

@@ -63,6 +63,10 @@ class LightningTrainerParameters(BaseModel):
     precision: PRECISION
     log_every_n_steps: int = Field(default=50, ge=1)
     accumulate_grad_batches: int = Field(default=1, ge=1)
+    limit_train_batches: int | float | None = None
+    limit_val_batches: int | float | None = None
+    limit_test_batches: int | float | None = None
+    limit_predict_batches: int | float | None = None


 class EarlyStoppingConfig(BaseModel):

@@ -92,7 +96,7 @@ class DataConfig(BaseModel):
     data_columns: list[str]


-class TrainingSettings(
+class TrainingSettings(KostylBaseModel):
     """Training parameters configuration."""

     trainer: LightningTrainerParameters

@@ -102,8 +106,8 @@ class TrainingSettings(BaseModel, ConfigLoadingMixin):


 class ClearMLTrainingSettings(
-    ClearMLConfigMixin,
     TrainingSettings,
+    ClearMLConfigMixin,
 ):
     """Training parameters configuration with ClearML features support (config syncing, model identifiers tracking and etc)."""

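A minimal sketch of the four new limit_* knobs, assuming LightningTrainerParameters fields mirror lightning.Trainer keyword arguments. Only the fields visible in the hunks above are known; any other required fields, and the exact PRECISION literals, are assumptions.

import lightning as L

from kostyl.ml_core.configs import LightningTrainerParameters

params = LightningTrainerParameters(
    precision="bf16-mixed",    # assumed to be a valid PRECISION value
    limit_train_batches=100,   # int -> cap of 100 training batches per epoch
    limit_val_batches=0.1,     # float -> 10% of the validation set
)
trainer = L.Trainer(**params.model_dump())  # pydantic v2 dump; keys match Trainer kwargs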
{kostyl_toolkit-0.1.7 → kostyl_toolkit-0.1.9}/kostyl/ml_core/lightning/extenstions/custom_module.py
RENAMED

@@ -21,7 +21,7 @@ from kostyl.ml_core.schedulers.base import BaseScheduler
 from kostyl.utils import setup_logger


-logger = setup_logger()
+logger = setup_logger(fmt="only_message")


 class KostylLightningModule(L.LightningModule):

@@ -93,6 +93,9 @@ class KostylLightningModule(L.LightningModule):
     def on_before_optimizer_step(self, optimizer) -> None:
         if self.model is None:
             raise ValueError("Model must be configured before optimizer step.")
+        if not hasattr(self, "hyperparams"):
+            logger.warning_once("cannot clip gradients, hyperparams attr missing")
+            return
         if self.hyperparams.grad_clip_val is None:
             return

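A hypothetical subclass illustrating the new guard: with a hyperparams attribute set, gradient clipping proceeds as before; without it, 0.1.9 now logs a one-time warning and skips clipping instead of raising AttributeError. The constructor signature of KostylLightningModule and the type of hyperparams are not shown in this diff, so both are assumptions here.

from kostyl.ml_core.configs import HyperparamsConfig
from kostyl.ml_core.lightning.extenstions.custom_module import KostylLightningModule


class MyModule(KostylLightningModule):
    def __init__(self, hyperparams: HyperparamsConfig) -> None:
        super().__init__()  # assumed no-arg constructor
        # Without this attribute, on_before_optimizer_step now warns once and returns
        # early rather than failing on self.hyperparams.grad_clip_val.
        self.hyperparams = hyperparams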