gradboard 0.2.0__tar.gz → 1.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of gradboard might be problematic.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: gradboard
-Version: 0.2.0
+Version: 1.0.1
 Summary: Easily snowboard down gnarly loss gradients
 License: MIT
 Author: Nicholas Bailey
@@ -25,9 +25,7 @@ class AdamS(Optimizer):
         weight_decay (float, optional): weight decay coefficient (default: 1e-4)
     """
 
-    def __init__(
-        self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, weight_decay=1e-4
-    ):
+    def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, weight_decay=0.1):
         if not 0.0 <= lr:
             raise ValueError("Invalid learning rate: {}".format(lr))
         if not 0.0 <= eps:
@@ -38,6 +36,7 @@ class AdamS(Optimizer):
             raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1]))
         if not 0.0 <= weight_decay:
             raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
+        weight_decay *= lr
         defaults = {"lr": lr, "betas": betas, "eps": eps, "weight_decay": weight_decay}
         super().__init__(params, defaults)
 
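Two related changes land in this hunk: the default weight_decay rises from 1e-4 to 0.1 (the docstring context line above still says 1e-4), and the constructor now rescales the coefficient by the learning rate via weight_decay *= lr before storing it in defaults. A minimal sketch of the arithmetic, assuming the stored coefficient is applied downstream the same way as in 0.2.0:

# Sketch only: compares the stored coefficient under old vs. new defaults.
lr = 1e-3

old_stored = 1e-4        # 0.2.0: weight_decay stored exactly as passed
new_stored = 0.1 * lr    # 1.0.1: weight_decay *= lr before storing

# At the default lr of 1e-3 the two agree (up to float rounding).
assert abs(old_stored - new_stored) < 1e-12

# At other learning rates the decay now tracks lr instead of staying fixed.
for lr in (7e-4, 5e-4):
    print(lr, 0.1 * lr)  # roughly 7e-05 and 5e-05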
@@ -130,7 +129,7 @@ class AdamS(Optimizer):
         return loss
 
 
-def get_optimiser(model, optimiser=AdamW, lr=7e-4, weight_decay=5e-2):
+def get_optimiser(model, optimiser=AdamW, lr=1e-3, weight_decay=1e-2):
     """
     Defaults are from one of the presets from the accompanying repo to Hassani
     et al. (2023) "Escaping the Big Data Paradigm with Compact Transformers",
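The helper's preset shifts from lr=7e-4, weight_decay=5e-2 to lr=1e-3, weight_decay=1e-2. A hypothetical usage sketch; the import path and the toy model are assumptions, not taken from the package:

# Hypothetical usage; the `gradboard` import path and toy model are assumed.
import torch.nn as nn
from gradboard import AdamS, get_optimiser

model = nn.Linear(16, 4)

# 1.0.1 defaults: lr=1e-3, weight_decay=1e-2 (previously lr=7e-4, weight_decay=5e-2).
optimiser = get_optimiser(model)

# Arguments can still be passed explicitly, e.g. to swap in AdamS from the same module.
optimiser = get_optimiser(model, optimiser=AdamS, lr=1e-3, weight_decay=0.1)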
@@ -1,6 +1,6 @@
 [project]
 name = "gradboard"
-version = "0.2.0"
+version = "1.0.1"
 description = "Easily snowboard down gnarly loss gradients"
 authors = [
     {name = "Nicholas Bailey"}