metacontroller-pytorch 0.0.22.tar.gz → 0.0.24.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of metacontroller-pytorch has been flagged as potentially problematic.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: metacontroller-pytorch
-Version: 0.0.22
+Version: 0.0.24
 Summary: Transformer Metacontroller
 Project-URL: Homepage, https://pypi.org/project/metacontroller/
 Project-URL: Repository, https://github.com/lucidrains/metacontroller
@@ -39,7 +39,8 @@ Requires-Dist: discrete-continuous-embed-readout>=0.1.12
 Requires-Dist: einops>=0.8.1
 Requires-Dist: einx>=0.3.0
 Requires-Dist: loguru
-Requires-Dist: memmap-replay-buffer>=0.0.1
+Requires-Dist: memmap-replay-buffer>=0.0.23
+Requires-Dist: torch-einops-utils>=0.0.16
 Requires-Dist: torch>=2.5
 Requires-Dist: x-evolution>=0.1.23
 Requires-Dist: x-mlps-pytorch
@@ -26,6 +26,9 @@ from discrete_continuous_embed_readout import Embed, Readout, EmbedAndReadout
 
 from assoc_scan import AssocScan
 
+from torch_einops_utils import pad_at_dim
+from torch_einops_utils.save_load import save_load
+
 # constants
 
 LinearNoBias = partial(Linear, bias = False)
@@ -46,14 +49,6 @@ def default(*args):
             return arg
     return None
 
-def pad_at_dim(t, pad: tuple[int, int], dim = -1, value = 0.):
-    if pad == (0, 0):
-        return t
-
-    dims_from_right = (- dim - 1) if dim < 0 else (t.ndim - dim - 1)
-    zeros = ((0, 0) * dims_from_right)
-    return F.pad(t, (*zeros, *pad), value = value)
-
 # tensor helpers
 
 def straight_through(src, tgt):
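
Side note on the removed helper: `pad_at_dim` now comes from `torch_einops_utils` instead of being defined locally. Judging from the deleted body, it pads a single dimension of a tensor on the left and right. A minimal usage sketch, assuming the library version behaves like the deleted local copy (the tensor shapes here are illustrative):

```python
import torch
from torch_einops_utils import pad_at_dim  # drop-in replacement for the deleted helper

t = torch.randn(2, 3, 5)

# pad the last dim with 1 element on the left, 2 on the right -> (2, 3, 8)
print(pad_at_dim(t, (1, 2), dim = -1).shape)

# pad a middle dim on the right, filling with a custom value -> (2, 4, 5)
print(pad_at_dim(t, (0, 1), dim = 1, value = -1.).shape)
```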
@@ -69,6 +64,7 @@ MetaControllerOutput = namedtuple('MetaControllerOutput', (
     'switch_loss'
 ))
 
+@save_load()
 class MetaController(Module):
     def __init__(
         self,
@@ -278,6 +274,7 @@ TransformerOutput = namedtuple('TransformerOutput', (
     'prev_hiddens'
 ))
 
+@save_load()
 class Transformer(Module):
     def __init__(
        self,
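
Both `MetaController` and `Transformer` gain persistence through `@save_load()`. The real implementation lives in `torch_einops_utils.save_load`; from the way the test below uses it, the decorator attaches a `save(path)` method and an `init_and_load(path, strict = ...)` classmethod. Here is a rough sketch of how such a decorator could work, assuming it records constructor arguments so the class can be rebuilt from the checkpoint alone (an illustration, not the library's actual code):

```python
import torch

def save_load():
    # hypothetical re-implementation, for illustration only

    def decorator(cls):
        orig_init = cls.__init__

        def __init__(self, *args, **kwargs):
            orig_init(self, *args, **kwargs)
            # remember constructor args so the object can be rebuilt on load
            self._init_args_kwargs = (args, kwargs)

        def save(self, path):
            args, kwargs = self._init_args_kwargs
            torch.save(dict(
                args = args,
                kwargs = kwargs,
                state_dict = self.state_dict()
            ), path)

        @classmethod
        def init_and_load(cls, path, strict = True):
            pkg = torch.load(path, weights_only = False)
            model = cls(*pkg['args'], **pkg['kwargs'])
            model.load_state_dict(pkg['state_dict'], strict = strict)
            return model

        cls.__init__ = __init__
        cls.save = save
        cls.init_and_load = init_and_load
        return cls

    return decorator
```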
@@ -1,6 +1,6 @@
 [project]
 name = "metacontroller-pytorch"
-version = "0.0.22"
+version = "0.0.24"
 description = "Transformer Metacontroller"
 authors = [
     { name = "Phil Wang", email = "lucidrains@gmail.com" }
@@ -29,8 +29,9 @@ dependencies = [
     "einx>=0.3.0",
     "einops>=0.8.1",
     "loguru",
-    "memmap-replay-buffer>=0.0.1",
+    "memmap-replay-buffer>=0.0.23",
     "torch>=2.5",
+    "torch-einops-utils>=0.0.16",
     "x-evolution>=0.1.23",
     "x-mlps-pytorch",
     "x-transformers"
@@ -1,6 +1,8 @@
 import pytest
 param = pytest.mark.parametrize
 
+from pathlib import Path
+
 import torch
 from metacontroller.metacontroller import Transformer, MetaController
 
@@ -68,3 +70,16 @@ def test_metacontroller(
 
     model.meta_controller = meta_controller
     model.evolve(1, lambda _: 1., noise_population_size = 2)
+
+    # saving and loading
+
+    meta_controller.save('./meta_controller.pt')
+
+    rehydrated_meta_controller = MetaController.init_and_load('./meta_controller.pt')
+
+    model.save('./trained.pt')
+
+    rehydrated_model = Transformer.init_and_load('./trained.pt', strict = False)
+
+    Path('./meta_controller.pt').unlink()
+    Path('./trained.pt').unlink()
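
The `strict = False` passed when rehydrating the `Transformer` presumably reaches the underlying `load_state_dict`, which fits the test: the `MetaController` was attached after construction, so a freshly initialized `Transformer` need not have matching keys for it. A hypothetical follow-up check (not in the test, reusing its variables) to confirm the round trip preserved the weights:

```python
# assert the reloaded meta controller matches the original, parameter by parameter
for p, p_loaded in zip(meta_controller.parameters(), rehydrated_meta_controller.parameters()):
    assert torch.equal(p, p_loaded)
```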
@@ -3,7 +3,7 @@
 # "fire",
 # "gymnasium",
 # "gymnasium[other]",
-# "memmap-replay-buffer>=0.0.10",
+# "memmap-replay-buffer>=0.0.12",
 # "metacontroller-pytorch",
 # "minigrid",
 # "tqdm"