adv-optm 1.1.0.dev3__py3-none-any.whl → 1.1.0.dev5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of adv-optm might be problematic; see the advisory details on the package registry's page for this release.
- adv_optm/__init__.py +1 -1
- adv_optm/optim/AdamW_adv.py +3 -3
- adv_optm/optim/Adopt_adv.py +435 -439
- adv_optm/optim/Lion_Prodigy_adv.py +315 -315
- adv_optm/optim/Lion_adv.py +1 -1
- adv_optm/optim/Prodigy_adv.py +13 -6
- adv_optm/optim/Simplified_AdEMAMix.py +3 -3
- adv_optm/util/Kourkoutas.py +71 -36
- {adv_optm-1.1.0.dev3.dist-info → adv_optm-1.1.0.dev5.dist-info}/METADATA +1 -1
- adv_optm-1.1.0.dev5.dist-info/RECORD +20 -0
- adv_optm-1.1.0.dev3.dist-info/RECORD +0 -20
- {adv_optm-1.1.0.dev3.dist-info → adv_optm-1.1.0.dev5.dist-info}/WHEEL +0 -0
- {adv_optm-1.1.0.dev3.dist-info → adv_optm-1.1.0.dev5.dist-info}/licenses/LICENSE +0 -0
- {adv_optm-1.1.0.dev3.dist-info → adv_optm-1.1.0.dev5.dist-info}/top_level.txt +0 -0
adv_optm/__init__.py
CHANGED
adv_optm/optim/AdamW_adv.py
CHANGED
|
@@ -100,8 +100,8 @@ class AdamW_adv(torch.optim.Optimizer):
|
|
|
100
100
|
alpha: float = 5.0,
|
|
101
101
|
t_alpha: int | None = None,
|
|
102
102
|
kourkoutas_beta: bool = False,
|
|
103
|
-
beta2_min: float = 0.
|
|
104
|
-
ema_alpha: float = 0.
|
|
103
|
+
beta2_min: float = 0.9,
|
|
104
|
+
ema_alpha: float = 0.95,
|
|
105
105
|
tiny_spike: float = 1e-9,
|
|
106
106
|
k_warmup_steps: int = 0,
|
|
107
107
|
k_logging: int = 0,
|
|
@@ -167,7 +167,7 @@ class AdamW_adv(torch.optim.Optimizer):
|
|
|
167
167
|
state = self.state[p]
|
|
168
168
|
|
|
169
169
|
# State Initialization
|
|
170
|
-
if
|
|
170
|
+
if 'step' not in state:
|
|
171
171
|
state['step'] = 0
|
|
172
172
|
|
|
173
173
|
should_factor = (
|