kostyl-toolkit 0.1.13-py3-none-any.whl → 0.1.14-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
kostyl/ml/lightning/extenstions/custom_module.py

@@ -26,8 +26,6 @@ logger = setup_logger(fmt="only_message")
 class KostylLightningModule(L.LightningModule):
     """Custom PyTorch Lightning Module with logging, checkpointing, and distributed training utilities."""

-    model: PreTrainedModel | nn.Module | None
-
     def get_process_group(self) -> ProcessGroup | None:
         """
         Retrieves the data parallel process group for distributed training.
@@ -54,16 +52,15 @@ class KostylLightningModule(L.LightningModule):
         dp_pg = dist.group.WORLD
         return dp_pg

-
+    @property
+    def model_instance(self) -> PreTrainedModel | nn.Module:
         """Returns the underlying model."""
-
-        raise ValueError("Model is not configured.")
-        return self.model
+        raise NotImplementedError

     @property
     def model_config(self) -> PretrainedConfig | None:
         """Returns the model configuration if available."""
-        model = self.
+        model = self.model_instance
         if hasattr(model, "config"):
             return model.config  # type: ignore
         return None
@@ -75,7 +72,7 @@ class KostylLightningModule(L.LightningModule):

     @override
     def on_save_checkpoint(self, checkpoint: dict[str, Any]) -> None:
-        model = self.
+        model = self.model_instance
         if hasattr(model, "config"):
             cfg = model.config
             if hasattr(cfg, "to_dict"):
@@ -94,9 +91,6 @@ class KostylLightningModule(L.LightningModule):

     @override
     def on_before_optimizer_step(self, optimizer) -> None:
-        if self.model is None:
-            raise ValueError("Model must be configured before optimizer step.")
-
         grad_clip_val = self.grad_clip_val
         if grad_clip_val is None:
             return
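Taken together, the custom_module.py hunks change how KostylLightningModule exposes its model: the optional model attribute and the runtime ValueError checks are removed, model_instance becomes a property whose base implementation raises NotImplementedError, and model_config and on_save_checkpoint now read the model through that property. The sketch below shows one way a subclass might satisfy the new contract; the class name MyModule, the nn.Linear placeholder model, and the assumption that the base constructor takes no required arguments are illustrative, not taken from the package (the import path follows the custom_module.py entry in the RECORD diff further down).

import torch.nn as nn

from kostyl.ml.lightning.extenstions.custom_module import KostylLightningModule


class MyModule(KostylLightningModule):  # hypothetical subclass for illustration
    def __init__(self) -> None:
        super().__init__()  # assumes the base __init__ needs no extra arguments
        # The subclass now owns the model; the base class no longer declares
        # an optional model attribute.
        self._model = nn.Linear(16, 2)

    @property
    def model_instance(self) -> nn.Module:
        # Override the base property, which raises NotImplementedError in 0.1.14.
        # model_config and on_save_checkpoint resolve the model through here.
        return self._model

With a plain nn.Module like this, model_config returns None, since only models that carry a config attribute (for example a transformers PreTrainedModel) contribute a configuration to the checkpoint.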
dist-info/RECORD

@@ -15,7 +15,7 @@ kostyl/ml/lightning/callbacks/checkpoint.py,sha256=FooGeeUz6TtoXQglpcK16NWAmSX3f
 kostyl/ml/lightning/callbacks/early_stopping.py,sha256=D5nyjktCJ9XYAf28-kgXG8jORvXLl1N3nbDQnvValPM,615
 kostyl/ml/lightning/callbacks/registry_uploading.py,sha256=um-fHQUp-TSD_76rVZoNV2IMi05RFjKajGvMV7SAb6s,4691
 kostyl/ml/lightning/extenstions/__init__.py,sha256=OY6QGv1agYgqqKf1xJBrxgp_i8FunVfPzYezfaRrGXU,182
-kostyl/ml/lightning/extenstions/custom_module.py,sha256=
+kostyl/ml/lightning/extenstions/custom_module.py,sha256=nB5jW7cqRD1tyh-q5LD2EtiFQwFkLXpnS9Yu6c5xMRg,5987
 kostyl/ml/lightning/extenstions/pretrained_model.py,sha256=ZOKtrVl095cwvI43wAz-Xdzu4l0v0lHH2mfh4WXwxKQ,5059
 kostyl/ml/lightning/loggers/__init__.py,sha256=e51dszaoJbuzwBkbdugmuDsPldoSO4yaRgmZUg1Bdy0,71
 kostyl/ml/lightning/loggers/tb_logger.py,sha256=I0WQV3SVVIds9kHhAYnN3SXodX64jfTk7_5gH-j3uzA,932
@@ -30,6 +30,6 @@ kostyl/utils/__init__.py,sha256=hkpmB6c5pr4Ti5BshOROebb7cvjDZfNCw83qZ_FFKMM,240
 kostyl/utils/dict_manipulations.py,sha256=e3vBicID74nYP8lHkVTQc4-IQwoJimrbFELy5uSF6Gk,1073
 kostyl/utils/fs.py,sha256=gAQNIU4R_2DhwjgzOS8BOMe0gZymtY1eZwmdgOdDgqo,510
 kostyl/utils/logging.py,sha256=3MvfDPArZhwakHu5nMlp_LpOsWg0E0SP26y41clsBtA,5232
-kostyl_toolkit-0.1.
-kostyl_toolkit-0.1.
-kostyl_toolkit-0.1.
+kostyl_toolkit-0.1.14.dist-info/WHEEL,sha256=3id4o64OvRm9dUknh3mMJNcfoTRK08ua5cU6DFyVy-4,79
+kostyl_toolkit-0.1.14.dist-info/METADATA,sha256=lOiHm_Bd3uf2eHv7unCgagPABhJUF793ZO3FtX5PWNc,4269
+kostyl_toolkit-0.1.14.dist-info/RECORD,,
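For reference, each RECORD row has the form path,sha256=<digest>,<size>, where the digest is the urlsafe base64 encoding of the file's SHA-256 hash with the trailing '=' padding stripped, as the wheel RECORD format specifies. Below is a minimal sketch of recomputing the new custom_module.py entry, assuming the 0.1.14 wheel has been unpacked into the current directory; the helper name record_hash is made up for this example.

import base64
import hashlib
from pathlib import Path


def record_hash(path: Path) -> str:
    # urlsafe base64 of the raw SHA-256 digest, with '=' padding removed,
    # which is how the sha256= values in RECORD are encoded
    digest = hashlib.sha256(path.read_bytes()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")


target = Path("kostyl/ml/lightning/extenstions/custom_module.py")
print(record_hash(target), target.stat().st_size)
# Expected to match the 0.1.14 RECORD entry above:
# sha256=nB5jW7cqRD1tyh-q5LD2EtiFQwFkLXpnS9Yu6c5xMRg 5987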