heavyball 0.24.1.tar.gz → 0.24.2.tar.gz
This diff compares two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- {heavyball-0.24.1 → heavyball-0.24.2}/PKG-INFO +1 -1
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/utils.py +9 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball.egg-info/PKG-INFO +1 -1
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball.egg-info/SOURCES.txt +1 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/setup.py +1 -1
- heavyball-0.24.2/test/test_hook.py +51 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/LICENSE +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/README.md +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/__init__.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/cached_delayed_psgd_kron.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/cached_psgd_kron.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/delayed_psgd.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/foreach_adamw.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/foreach_adopt.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/foreach_laprop.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/foreach_sfadamw.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/foreach_soap.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/p_adam.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/palm_foreach_sfadamw.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/palm_foreach_soap.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/precond_schedule_foreach_soap.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/precond_schedule_palm_foreach_soap.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/precond_schedule_sfpsoap.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/psgd_kron.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/pure_psgd.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball/schedule_free_palm_foreach_soap.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball.egg-info/dependency_links.txt +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball.egg-info/requires.txt +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/heavyball.egg-info/top_level.txt +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/setup.cfg +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_bf16_params.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_bf16_q.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_bf16_storage.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_caution.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_channels_last.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_closure.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_ema.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_foreach.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_mars.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_memory.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_merge.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_no_grad.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_psgd.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_soap.py +0 -0
- {heavyball-0.24.1 → heavyball-0.24.2}/test/test_stochastic_updates.py +0 -0
--- a/heavyball/utils.py
+++ b/heavyball/utils.py
@@ -1129,3 +1129,12 @@ def merge_group(group, *tensors):
         append_or_extend(out, dim_merger(t, group['max_size_triangular'] if 'max_size_triangular' in group else group[
             'max_precond_dim'], group.get('split', False)))
     return out
+
+def hook_optimizer_into_model(model, optimizer, *args, **kwargs):
+    def _step(p: Tensor, o: torch.optim.Optimizer):
+        o.step()
+        o.zero_grad()
+
+
+    for p in model.parameters():
+        p.register_post_accumulate_grad_hook(functools.partial(_step, o=optimizer([p], *args, **kwargs)))
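The new helper wires the optimizer into the backward pass itself: it builds one optimizer instance per parameter and registers a post-accumulate-grad hook that calls `step()` and `zero_grad()` as soon as that parameter's gradient is ready, so a bare `loss.backward()` also applies the update. A minimal usage sketch follows; the toy model and the choice of `ForeachAdamW` are illustrative, not taken from this diff, and `Tensor.register_post_accumulate_grad_hook` requires PyTorch 2.1+:

```python
# Hypothetical usage sketch of the new helper; ForeachAdamW is just one
# of the exported heavyball optimizers, and the model is a stand-in.
import torch
from torch import nn

import heavyball
from heavyball.utils import hook_optimizer_into_model

model = nn.Linear(128, 128)

# One ForeachAdamW instance is created internally per parameter; extra
# args/kwargs (here lr) are forwarded to each optimizer's constructor.
hook_optimizer_into_model(model, heavyball.ForeachAdamW, lr=1e-3)

for _ in range(8):
    loss = model(torch.randn(32, 128)).square().mean()
    loss.backward()  # hooks step and zero each parameter; no opt.step() needed
```

Because the optimizer class is instantiated separately for each parameter, every parameter shares the same constructor arguments; per-parameter-group settings are not expressible through this helper.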
--- /dev/null
+++ b/test/test_hook.py
@@ -0,0 +1,51 @@
+import os
+
+os.environ["TORCH_LOGS"] = "+recompiles"
+
+import heavyball
+import heavyball.utils
+import pytest
+import torch
+from benchmark.utils import get_optim
+from heavyball.utils import clean, set_torch, hook_optimizer_into_model
+from torch import nn
+from torch._dynamo import config
+
+heavyball.utils.compile_mode = 'default'
+config.cache_size_limit = 128
+
+
+@pytest.mark.parametrize("opt", heavyball.__all__)
+@pytest.mark.parametrize("size,depth", [(128, 1)])
+def test_foreach(opt, size, depth: int, iterations: int = 128, outer_iterations: int = 1):
+    set_torch()
+    opt = getattr(heavyball, opt)
+
+    peaks = []
+    losses = []
+
+    for use_hook in [False, True]:
+        torch.manual_seed(0x2131290)
+        peaks.append([])
+        losses.append([])
+
+        for i in range(outer_iterations):
+            model = nn.Sequential(*[nn.Linear(size, size) for _ in range(depth)]).cuda()
+
+            if use_hook:
+                hook_optimizer_into_model(model, opt, lr=1e-3, weight_decay=1e-4, warmup_steps=16)
+            else:
+                o = get_optim(opt, model.parameters(), lr=1e-3, weight_decay=1e-4, warmup_steps=16)
+            for _ in range(iterations):
+                loss = model(torch.randn((1024, size), device='cuda')).square().mean()
+                loss.backward()
+                if not use_hook:
+                    o.step()
+                    o.zero_grad()
+                losses[-1].append(loss.detach())
+
+            clean()
+
+    for i, (l0, l1) in enumerate(zip(*losses)):
+        print(i, l0.item(), l1.item())
+        assert torch.allclose(l0.float(), l1.float(), rtol=0.1)
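For readers unfamiliar with the mechanism this test exercises, the hooked-update pattern can be reproduced in stock PyTorch. The sketch below mirrors `hook_optimizer_into_model` using `torch.optim.AdamW`; everything in it is standard PyTorch, and nothing heavyball-specific is assumed:

```python
# Plain-PyTorch miniature of the hooked-update pattern (assumes torch >= 2.1,
# where Tensor.register_post_accumulate_grad_hook was introduced).
import functools

import torch
from torch import nn

model = nn.Linear(16, 16)

def _step(p: torch.Tensor, o: torch.optim.Optimizer) -> None:
    # Called by autograd right after p.grad has finished accumulating.
    o.step()
    o.zero_grad()

for p in model.parameters():
    # One optimizer per parameter, mirroring optimizer([p], ...) in the diff.
    opt = torch.optim.AdamW([p], lr=1e-3)
    p.register_post_accumulate_grad_hook(functools.partial(_step, o=opt))

loss = model(torch.randn(4, 16)).square().mean()
loss.backward()  # each parameter is updated as soon as its grad is ready
```

Stepping inside the backward pass lets each gradient be consumed and zeroed immediately rather than held until backward finishes, which is the usual motivation for this pattern. The test then checks that losses from the hooked and explicit paths agree within rtol=0.1 for every optimizer exported in `heavyball.__all__`.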