adv-optm 0.1.9.tar.gz → 1.0.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (24)
  1. {adv_optm-0.1.9 → adv_optm-1.0.0}/PKG-INFO +1 -1
  2. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/__init__.py +1 -1
  3. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/optim/Prodigy_adv.py +3 -6
  4. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm.egg-info/PKG-INFO +1 -1
  5. {adv_optm-0.1.9 → adv_optm-1.0.0}/setup.py +1 -1
  6. {adv_optm-0.1.9 → adv_optm-1.0.0}/LICENSE +0 -0
  7. {adv_optm-0.1.9 → adv_optm-1.0.0}/README.md +0 -0
  8. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/optim/AdamW_adv.py +0 -0
  9. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/optim/Adopt_adv.py +0 -0
  10. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/optim/Lion_Prodigy_adv.py +0 -0
  11. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/optim/Lion_adv.py +0 -0
  12. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/optim/Simplified_AdEMAMix.py +0 -0
  13. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/optim/__init__.py +0 -0
  14. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/util/BF16_Stochastic_Rounding.py +0 -0
  15. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/util/Effective_Shape.py +0 -0
  16. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/util/NNMF.py +0 -0
  17. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/util/One_Bit_Boolean.py +0 -0
  18. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/util/OrthoGrad.py +0 -0
  19. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/util/__init__.py +0 -0
  20. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm.egg-info/SOURCES.txt +0 -0
  21. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm.egg-info/dependency_links.txt +0 -0
  22. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm.egg-info/requires.txt +0 -0
  23. {adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm.egg-info/top_level.txt +0 -0
  24. {adv_optm-0.1.9 → adv_optm-1.0.0}/setup.cfg +0 -0

{adv_optm-0.1.9 → adv_optm-1.0.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: adv_optm
-Version: 0.1.9
+Version: 1.0.0
 Summary: A family of highly efficient, lightweight yet powerful optimizers.
 Home-page: https://github.com/Koratahiu/Advanced_Optimizers
 Author: Koratahiu

{adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/__init__.py
@@ -16,4 +16,4 @@ __all__ = [
     "Lion_Prodigy_adv",
 ]
 
-__version__ = "0.1.9"
+__version__ = "1.0.0"

{adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm/optim/Prodigy_adv.py
@@ -141,6 +141,9 @@ class Prodigy_adv(torch.optim.Optimizer):
         if use_atan2 and Simplified_AdEMAMix:
             print("Warning: use_atan2 is incompatible with Simplified_AdEMAMix. Disabling use_atan2.")
             use_atan2 = False
+        if Simplified_AdEMAMix and alpha_grad > 0:
+            # Scale d_coef by 1/alpha_grad; this forces Prodigy to behave well with Simplified_AdEMAMix.
+            d_coef = d_coef / alpha_grad
 
         defaults = {
             "lr": lr, "betas": betas, "eps": eps, "weight_decay": weight_decay,
@@ -456,12 +459,6 @@ class Prodigy_adv(torch.optim.Optimizer):
 
         d_hat = self.d
         if global_d_denom > 0:
-            if self.Simplified_AdEMAMix and g_group['alpha_grad'] > 0:
-                # A simple and effective hack to make Prodigy compatible with Simplified_AdEMAMix's large step sizes:
-                # dividing by alpha_grad ensures that the d_numerator terms influenced by (alpha_grad * grad)
-                # are normalized back by 1/alpha_grad. This is a heuristic, since the update is also influenced
-                # by the increasing and decaying accumulator, but it is effective and worked for me (for LoRA/finetuning).
-                global_d_numerator /= g_group['alpha_grad']
             d_hat = d_coef * global_d_numerator / global_d_denom
             if self.d == g_group['d0']:
                 self.d = max(self.d, d_hat)
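
Read together, the two Prodigy_adv.py hunks move the 1/alpha_grad correction from the accumulated numerator (applied at estimation time in 0.1.9) into d_coef (applied once at construction in 1.0.0). A minimal standalone sketch of why the two placements give the same d_hat for a single estimation step, reusing the names from the diff (d_coef, alpha_grad, global_d_numerator, global_d_denom) but with made-up scalar values rather than the package's real optimizer state:

# Sketch only: scalar stand-ins, not the actual Prodigy_adv class.
alpha_grad = 100.0          # Simplified_AdEMAMix gradient-amplification factor (assumed value)
d_coef = 1.0                # user-supplied Prodigy coefficient (assumed value)
global_d_numerator = 50.0   # accumulated from terms proportional to alpha_grad * grad (assumed value)
global_d_denom = 2.0        # accumulated denominator (assumed value)

# 0.1.9 behaviour: the numerator was divided by alpha_grad right before estimating d_hat.
d_hat_old = d_coef * (global_d_numerator / alpha_grad) / global_d_denom

# 1.0.0 behaviour: the same 1/alpha_grad factor is folded into d_coef once, in __init__.
d_coef_new = d_coef / alpha_grad
d_hat_new = d_coef_new * global_d_numerator / global_d_denom

# Both formulations yield the same estimate; only where the scaling is applied has moved.
assert abs(d_hat_old - d_hat_new) < 1e-12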

{adv_optm-0.1.9 → adv_optm-1.0.0}/adv_optm.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: adv_optm
-Version: 0.1.9
+Version: 1.0.0
 Summary: A family of highly efficient, lightweight yet powerful optimizers.
 Home-page: https://github.com/Koratahiu/Advanced_Optimizers
 Author: Koratahiu

{adv_optm-0.1.9 → adv_optm-1.0.0}/setup.py
@@ -5,7 +5,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
 
 setup(
     name="adv_optm",
-    version="0.1.9",
+    version="1.0.0",
     author="Koratahiu",
     author_email="hiuhonor@gmail.com",
     license='Apache 2.0',