gradboard 3.0.0-py3-none-any.whl → 4.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


gradboard/scheduler.py CHANGED
@@ -6,8 +6,6 @@ from typing import Optional
 import copy
 import math
 
-from scipy.ndimage import gaussian_filter1d
-
 from torch.amp import GradScaler
 
 from .cycles import Cycle
@@ -25,11 +23,12 @@ class PASS:
         optimiser,
         scaler: Optional[GradScaler] = None,
         range_test: bool = False,
-        max_lr: float = None,
         cool_point_multiplier: float = 1 / 60,
     ):
-        if not range_test:
-            assert max_lr is not None
+        """
+        If not using range test, we assume the optimiser has the learning rates
+        set as desired.
+        """
 
         self.model = model
         self.optimiser = optimiser
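
Note on the constructor change above: with the max_lr argument gone, PASS now takes the learning rates from the optimiser itself unless range_test is enabled. A minimal usage sketch of the new calling pattern; the import path, model, and learning-rate value are illustrative assumptions, not taken from the package documentation:

    import torch
    from gradboard.scheduler import PASS  # assumed import path

    model = torch.nn.Linear(10, 1)

    # 4.0.0: set the desired learning rate on the optimiser directly.
    optimiser = torch.optim.SGD(model.parameters(), lr=3e-3)
    scheduler = PASS(model=model, optimiser=optimiser)

    # Or let the range test choose the rate instead of presetting it:
    scheduler_rt = PASS(model=model, optimiser=optimiser, range_test=True)
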
@@ -41,7 +40,6 @@ class PASS:
 
         self.original_param_groups = copy.deepcopy(optimiser.param_groups)
 
-        self.max_lr = max_lr
         self.cool_point_multiplier = cool_point_multiplier
 
         self.original_states = self._saved_states()
@@ -137,8 +135,7 @@ class PASS:
         learning_rates = [t[0] for t in range_test_results]
         losses = [t[1] for t in self.range_test_results]
         losses = losses[:-1] + [10 * max(losses)]
-        smoothed_losses = gaussian_filter1d(losses, 3)
-        return list(zip(learning_rates, smoothed_losses, strict=True))
+        return list(zip(learning_rates, losses, strict=True))
 
     def _plot_range_test(self, range_test_results):
         """
@@ -160,15 +157,15 @@ class PASS:
         points_left_of_min = [r for r in range_test_results if r[0] < minimum[0]]
         max_left_of_min = max(points_left_of_min, key=lambda x: x[1])
         difference = max_left_of_min[1] - minimum[1]
-        self.max_lr = None
+        max_lr = None
         for p in sorted(points_left_of_min, key=lambda x: x[0]):
-            if (self.max_lr is None) and (p[1] < minimum[1] + 0.2 * difference):
-                self.max_lr = p[0]
+            if (max_lr is None) and (p[1] < minimum[1] + 0.2 * difference):
+                max_lr = p[0]
             else:
                 continue
-        self.set_all_lr(self.max_lr)
+        self.set_all_lr(max_lr)
         self.original_param_groups = copy.deepcopy(self.optimiser.param_groups)
-        print("High LR", self.max_lr)
+        print("High LR", max_lr)
 
     def update_learning_rates(self):
         if not self.finished:
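
The selection logic above is unchanged apart from using a local max_lr instead of the removed attribute: among the range-test points to the left of the loss minimum, it picks the smallest learning rate whose loss has already fallen below minimum + 0.2 * (left-hand maximum - minimum). A standalone sketch of that heuristic on invented (learning_rate, loss) pairs; in the package, minimum is computed just above this hunk, so the min() call here is an assumption:

    # Invented range-test results: (learning_rate, loss), learning rates ascending.
    range_test_results = [
        (1e-5, 2.30),
        (1e-4, 2.00),
        (1e-3, 0.60),
        (1e-2, 0.40),  # loss minimum
        (1e-1, 5.00),
    ]

    minimum = min(range_test_results, key=lambda x: x[1])          # (1e-2, 0.40)
    points_left_of_min = [r for r in range_test_results if r[0] < minimum[0]]
    max_left_of_min = max(points_left_of_min, key=lambda x: x[1])  # (1e-5, 2.30)
    difference = max_left_of_min[1] - minimum[1]                   # 1.90

    max_lr = None
    for p in sorted(points_left_of_min, key=lambda x: x[0]):
        if (max_lr is None) and (p[1] < minimum[1] + 0.2 * difference):
            max_lr = p[0]

    print("High LR", max_lr)  # 1e-3: first loss below 0.40 + 0.2 * 1.90 = 0.78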

gradboard-3.0.0.dist-info/METADATA → gradboard-4.0.0.dist-info/METADATA CHANGED
@@ -1,17 +1,18 @@
 Metadata-Version: 2.3
 Name: gradboard
-Version: 3.0.0
+Version: 4.0.0
 Summary: Easily snowboard down gnarly loss gradients
 License: MIT
 Author: Nicholas Bailey
-Requires-Python: >=3.11
+Requires-Python: >=3.8
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
-Requires-Dist: numpy (>=2.0.2,<3.0.0)
-Requires-Dist: scipy (>=1.15.3,<2.0.0)
 Description-Content-Type: text/markdown
 
 # gradboard

gradboard-4.0.0.dist-info/RECORD ADDED
@@ -0,0 +1,8 @@
+gradboard/__init__.py,sha256=57AkHusYwLCsusiVnajH5pMFKioRCj-3IjF9qpdOzE0,69
+gradboard/cycles.py,sha256=iGEW3Rlp-JNyQZLfpaDXxUCAcEV01ANjLF-Fnhug-qA,10120
+gradboard/optimiser.py,sha256=Br7MNiziiwQZhq6UWkBy1zndRuDFznNfrE91usguZHI,6168
+gradboard/scheduler.py,sha256=VbtqSBtSpgLI7qbtz0uGgNmb1NtR77r_Ole9wmdVMPw,6859
+gradboard-4.0.0.dist-info/LICENSE,sha256=0BAzJE5BqQ7Iixp_AFdB2W1uO-HCRX-Qfun8PHt6yVM,1073
+gradboard-4.0.0.dist-info/METADATA,sha256=tDj9jiFskMQygTAZTpTF_OL1ntLLKHiac7_3mbsXhfw,2246
+gradboard-4.0.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+gradboard-4.0.0.dist-info/RECORD,,

gradboard-3.0.0.dist-info/RECORD REMOVED
@@ -1,8 +0,0 @@
-gradboard/__init__.py,sha256=57AkHusYwLCsusiVnajH5pMFKioRCj-3IjF9qpdOzE0,69
-gradboard/cycles.py,sha256=iGEW3Rlp-JNyQZLfpaDXxUCAcEV01ANjLF-Fnhug-qA,10120
-gradboard/optimiser.py,sha256=Br7MNiziiwQZhq6UWkBy1zndRuDFznNfrE91usguZHI,6168
-gradboard/scheduler.py,sha256=sELx3LdbF_ZiD3HDtxOPIR3xLJHD1Vv7IXgJGYuYdJM,6985
-gradboard-3.0.0.dist-info/LICENSE,sha256=0BAzJE5BqQ7Iixp_AFdB2W1uO-HCRX-Qfun8PHt6yVM,1073
-gradboard-3.0.0.dist-info/METADATA,sha256=YazOjmOo5Fot5xtGvQSyH600yKfUGsvr0j8NKFkZu0U,2173
-gradboard-3.0.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-gradboard-3.0.0.dist-info/RECORD,,