adv-optm 2.2.dev1__tar.gz → 2.2.dev2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/PKG-INFO +1 -1
  2. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/__init__.py +1 -1
  3. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/util/Muon_util.py +6 -1
  4. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm.egg-info/PKG-INFO +1 -1
  5. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/setup.py +1 -1
  6. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/LICENSE +0 -0
  7. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/README.md +0 -0
  8. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/optim/AdaMuon_adv.py +0 -0
  9. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/optim/AdamW_adv.py +0 -0
  10. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/optim/Adopt_adv.py +0 -0
  11. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/optim/Lion_Prodigy_adv.py +0 -0
  12. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/optim/Lion_adv.py +0 -0
  13. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/optim/Muon_adv.py +0 -0
  14. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/optim/Prodigy_adv.py +0 -0
  15. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/optim/SignSGD_adv.py +0 -0
  16. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/optim/Simplified_AdEMAMix.py +0 -0
  17. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/optim/__init__.py +0 -0
  18. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/util/Kourkoutas.py +0 -0
  19. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/util/Muon_AuxAdam.py +0 -0
  20. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/util/OrthoGrad.py +0 -0
  21. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/util/__init__.py +0 -0
  22. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/util/factorization_util.py +0 -0
  23. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/util/lion_k.py +0 -0
  24. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/util/param_update.py +0 -0
  25. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm/util/update_util.py +0 -0
  26. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm.egg-info/SOURCES.txt +0 -0
  27. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm.egg-info/dependency_links.txt +0 -0
  28. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm.egg-info/requires.txt +0 -0
  29. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/adv_optm.egg-info/top_level.txt +0 -0
  30. {adv_optm-2.2.dev1 → adv_optm-2.2.dev2}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: adv_optm
3
- Version: 2.2.dev1
3
+ Version: 2.2.dev2
4
4
  Summary: A family of highly efficient, lightweight yet powerful optimizers.
5
5
  Home-page: https://github.com/Koratahiu/Advanced_Optimizers
6
6
  Author: Koratahiu
@@ -22,4 +22,4 @@ __all__ = [
22
22
  "SignSGD_adv",
23
23
  ]
24
24
 
25
- __version__ = "2.2.dev1"
25
+ __version__ = "2.2.dev2"
@@ -352,7 +352,12 @@ def spectral_norm_update(update: torch.Tensor, vector_state: torch.Tensor, targe
352
352
  # Normalize v_new to get next state
353
353
  v_norm = torch.linalg.vector_norm(v_new)
354
354
 
355
- vector_state.copy_(v_new.div_(v_norm.clamp_min_(1e-12)).to(vector_state.dtype))
355
+ # if v_norm >= 0.5:
356
+ # vector_state.copy_(v_new.div_(v_norm.clamp_min_(1e-12))).to(vector_state.dtype))
357
+ candidate_v = v_new / v_norm
358
+ next_state = torch.where(v_norm >= 0.5, candidate_v, vector_state)
359
+ vector_state.copy_(next_state.to(vector_state.dtype))
360
+ # Else: We keep the old vector_state (which is a random unit vector at init)
356
361
 
357
362
  # Estimate sigma = ||A @ v|| (since v is unit norm)
358
363
  # Re-compute A @ v_new with the updated vector for better estimate
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: adv_optm
3
- Version: 2.2.dev1
3
+ Version: 2.2.dev2
4
4
  Summary: A family of highly efficient, lightweight yet powerful optimizers.
5
5
  Home-page: https://github.com/Koratahiu/Advanced_Optimizers
6
6
  Author: Koratahiu
@@ -5,7 +5,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
5
5
 
6
6
  setup(
7
7
  name="adv_optm",
8
- version="2.2.dev1",
8
+ version="2.2.dev2",
9
9
  author="Koratahiu",
10
10
  author_email="hiuhonor@gmail.com",
11
11
  license='Apache 2.0',
File without changes
File without changes
File without changes