kostyl-toolkit 0.1.6__tar.gz → 0.1.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/PKG-INFO +1 -1
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/clearml/dataset_utils.py +4 -7
- kostyl_toolkit-0.1.7/kostyl/ml_core/configs/__init__.py +30 -0
- kostyl_toolkit-0.1.6/kostyl/ml_core/configs/training_params.py → kostyl_toolkit-0.1.7/kostyl/ml_core/configs/training_settings.py +4 -3
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/lightning/extenstions/pretrained_model.py +1 -1
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/pyproject.toml +1 -1
- kostyl_toolkit-0.1.6/kostyl/ml_core/configs/__init__.py +0 -30
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/README.md +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/__init__.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/__init__.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/clearml/__init__.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/clearml/config_mixin.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/clearml/logging_utils.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/clearml/pulling_utils.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/configs/base.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/configs/hyperparams.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/dist_utils.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/lightning/__init__.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/lightning/callbacks/__init__.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/lightning/callbacks/checkpoint.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/lightning/callbacks/early_stopping.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/lightning/callbacks/registry_uploading.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/lightning/extenstions/__init__.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/lightning/extenstions/custom_module.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/lightning/loggers/__init__.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/lightning/loggers/tb_logger.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/lightning/steps_estimation.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/metrics_formatting.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/params_groups.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/schedulers/__init__.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/schedulers/base.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/schedulers/composite.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/schedulers/cosine.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/utils/__init__.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/utils/dict_manipulations.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/utils/fs.py +0 -0
- {kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/utils/logging.py +0 -0
{kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/clearml/dataset_utils.py

```diff
@@ -1,3 +1,4 @@
+from collections.abc import Collection
 from concurrent.futures import ThreadPoolExecutor
 from concurrent.futures import as_completed
 from pathlib import Path
@@ -26,20 +27,16 @@ def collect_clearml_datasets(
     return datasets_list


-def download_clearml_datasets(
+def download_clearml_datasets(datasets: Collection[ClearMLDataset]) -> None:
     """
     Download all ClearML datasets in parallel.

     Args:
-
-            `ClearMLDataset` instances whose contents must be downloaded
-            locally.
+        datasets: Collection of initialized `ClearMLDataset` instances to download.

     """
     with ThreadPoolExecutor() as executor:
-        futures = [
-            executor.submit(ds.get_local_copy) for ds in datasets_mapping.values()
-        ]
+        futures = [executor.submit(ds.get_local_copy) for ds in datasets]
         for future in as_completed(futures):
             future.result()
     return
```
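`download_clearml_datasets` now takes any collection of already-initialized dataset handles rather than a name-to-dataset mapping. A minimal usage sketch, assuming `ClearMLDataset` wraps ClearML's `clearml.Dataset` and using made-up dataset IDs:

```python
from clearml import Dataset

from kostyl.ml_core.clearml.dataset_utils import download_clearml_datasets

# Hypothetical dataset IDs, for illustration only.
datasets = [
    Dataset.get(dataset_id="aaaa000000000000"),
    Dataset.get(dataset_id="bbbb111111111111"),
]

# Each dataset's local copy is fetched on a worker thread; the call
# blocks until every download future has completed.
download_clearml_datasets(datasets)
```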
kostyl_toolkit-0.1.7/kostyl/ml_core/configs/__init__.py

```diff
@@ -0,0 +1,30 @@
+from .base import ConfigLoadingMixin
+from .hyperparams import HyperparamsConfig
+from .hyperparams import Lr
+from .hyperparams import Optimizer
+from .hyperparams import WeightDecay
+from .training_settings import CheckpointConfig
+from .training_settings import ClearMLTrainingSettings
+from .training_settings import DataConfig
+from .training_settings import DDPStrategyConfig
+from .training_settings import EarlyStoppingConfig
+from .training_settings import FSDP1StrategyConfig
+from .training_settings import SingleDeviceStrategyConfig
+from .training_settings import TrainingSettings
+
+
+__all__ = [
+    "CheckpointConfig",
+    "ClearMLTrainingSettings",
+    "ConfigLoadingMixin",
+    "DDPStrategyConfig",
+    "DataConfig",
+    "EarlyStoppingConfig",
+    "FSDP1StrategyConfig",
+    "HyperparamsConfig",
+    "Lr",
+    "Optimizer",
+    "SingleDeviceStrategyConfig",
+    "TrainingSettings",
+    "WeightDecay",
+]
```
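The rewritten `__init__.py` re-exports the renamed settings classes at the package level, so the package namespace and the submodule should resolve to the same objects. A small sketch of what that implies:

```python
# Package-level re-export and direct submodule import refer to the same class.
from kostyl.ml_core.configs import TrainingSettings
from kostyl.ml_core.configs.training_settings import TrainingSettings as DirectTrainingSettings

assert TrainingSettings is DirectTrainingSettings
```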
kostyl_toolkit-0.1.6/kostyl/ml_core/configs/training_params.py → kostyl_toolkit-0.1.7/kostyl/ml_core/configs/training_settings.py

```diff
@@ -49,6 +49,7 @@ class DDPStrategyConfig(BaseModel):
     """Distributed Data Parallel (DDP) strategy configuration."""

     type: Literal["ddp"]
+    find_unused_parameters: bool = False


 class LightningTrainerParameters(BaseModel):
```
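`DDPStrategyConfig` gains a `find_unused_parameters` flag that defaults to `False`. A hedged construction sketch, assuming the fields shown in this hunk are the only required ones:

```python
from kostyl.ml_core.configs import DDPStrategyConfig

# The default keeps find_unused_parameters off, matching torch's cheaper
# setting for models whose full graph participates in every backward pass.
strategy = DDPStrategyConfig(type="ddp")
assert strategy.find_unused_parameters is False

# Opt in explicitly when parts of the model are conditionally skipped.
strategy = DDPStrategyConfig(type="ddp", find_unused_parameters=True)
```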
```diff
@@ -91,7 +92,7 @@ class DataConfig(BaseModel):
     data_columns: list[str]


-class TrainingParams(BaseModel, ConfigLoadingMixin):
+class TrainingSettings(BaseModel, ConfigLoadingMixin):
     """Training parameters configuration."""

     trainer: LightningTrainerParameters
@@ -100,9 +101,9 @@ class TrainingParams(BaseModel, ConfigLoadingMixin):
     data: DataConfig


-class ClearMLTrainingParameters(
+class ClearMLTrainingSettings(
     ClearMLConfigMixin,
-    TrainingParams,
+    TrainingSettings,
 ):
     """Training parameters configuration with ClearML features support (config syncing, model identifiers tracking and etc)."""

```
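As far as this diff shows, the change is a pure rename of the settings classes, alongside the module rename from `training_params.py` to `training_settings.py`. A before/after import sketch for downstream code:

```python
# 0.1.6 (no longer importable in 0.1.7):
# from kostyl.ml_core.configs import ClearMLTrainingParameters, TrainingParams

# 0.1.7:
from kostyl.ml_core.configs import ClearMLTrainingSettings, TrainingSettings
```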
{kostyl_toolkit-0.1.6 → kostyl_toolkit-0.1.7}/kostyl/ml_core/lightning/extenstions/pretrained_model.py

```diff
@@ -9,7 +9,7 @@ from transformers import PreTrainedModel
 try:
     from peft import PeftConfig
 except ImportError:
-    PeftConfig = None
+    PeftConfig = None  # ty: ignore

 from kostyl.utils.logging import log_incompatible_keys
 from kostyl.utils.logging import setup_logger
```
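The only change here is the `# ty: ignore` comment, presumably to silence a type-checker complaint about rebinding the imported class name to `None`. The optional-dependency pattern itself is unchanged; a generic sketch of how callers typically guard such a fallback (illustrative, not code from this package):

```python
try:
    from peft import PeftConfig  # optional dependency
except ImportError:
    PeftConfig = None

def require_peft() -> None:
    # Hypothetical helper: fail loudly when PEFT features are requested
    # but the optional dependency is not installed.
    if PeftConfig is None:
        raise ImportError("peft is not installed; install it to use PEFT adapters")
```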
kostyl_toolkit-0.1.6/kostyl/ml_core/configs/__init__.py

```diff
@@ -1,30 +0,0 @@
-from .base import ConfigLoadingMixin
-from .hyperparams import HyperparamsConfig
-from .hyperparams import Lr
-from .hyperparams import Optimizer
-from .hyperparams import WeightDecay
-from .training_params import CheckpointConfig
-from .training_params import ClearMLTrainingParameters
-from .training_params import DataConfig
-from .training_params import DDPStrategyConfig
-from .training_params import EarlyStoppingConfig
-from .training_params import FSDP1StrategyConfig
-from .training_params import SingleDeviceStrategyConfig
-from .training_params import TrainingParams
-
-
-__all__ = [
-    "CheckpointConfig",
-    "ClearMLTrainingParameters",
-    "ConfigLoadingMixin",
-    "DDPStrategyConfig",
-    "DataConfig",
-    "EarlyStoppingConfig",
-    "FSDP1StrategyConfig",
-    "HyperparamsConfig",
-    "Lr",
-    "Optimizer",
-    "SingleDeviceStrategyConfig",
-    "TrainingParams",
-    "WeightDecay",
-]
```