metacontroller-pytorch 0.0.23__tar.gz → 0.0.24__tar.gz

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: metacontroller-pytorch
-Version: 0.0.23
+Version: 0.0.24
 Summary: Transformer Metacontroller
 Project-URL: Homepage, https://pypi.org/project/metacontroller/
 Project-URL: Repository, https://github.com/lucidrains/metacontroller
@@ -39,8 +39,8 @@ Requires-Dist: discrete-continuous-embed-readout>=0.1.12
 Requires-Dist: einops>=0.8.1
 Requires-Dist: einx>=0.3.0
 Requires-Dist: loguru
-Requires-Dist: memmap-replay-buffer>=0.0.1
-Requires-Dist: torch-einops-utils>=0.0.7
+Requires-Dist: memmap-replay-buffer>=0.0.23
+Requires-Dist: torch-einops-utils>=0.0.16
 Requires-Dist: torch>=2.5
 Requires-Dist: x-evolution>=0.1.23
 Requires-Dist: x-mlps-pytorch
@@ -27,6 +27,7 @@ from discrete_continuous_embed_readout import Embed, Readout, EmbedAndReadout
 from assoc_scan import AssocScan
 
 from torch_einops_utils import pad_at_dim
+from torch_einops_utils.save_load import save_load
 
 # constants
 
@@ -63,6 +64,7 @@ MetaControllerOutput = namedtuple('MetaControllerOutput', (
     'switch_loss'
 ))
 
+@save_load()
 class MetaController(Module):
     def __init__(
         self,
@@ -272,6 +274,7 @@ TransformerOutput = namedtuple('TransformerOutput', (
     'prev_hiddens'
 ))
 
+@save_load()
 class Transformer(Module):
     def __init__(
         self,
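Both classes now pick up checkpointing methods from the new `@save_load()` decorator imported from `torch_einops_utils.save_load`. The decorator's implementation is not part of this diff; the sketch below is a guess at its mechanics, assuming it records the constructor arguments at `__init__` time and bundles them with the state dict so the checkpoint alone can rebuild the module. Everything beyond the `save` and `init_and_load` names exercised by the tests is an assumption.

```python
import pickle
import torch

def save_load():
    # hypothetical reconstruction, for illustration only: the real
    # torch_einops_utils.save_load may differ in format and features
    def decorator(klass):
        orig_init = klass.__init__

        def __init__(self, *args, **kwargs):
            orig_init(self, *args, **kwargs)
            # stash the constructor arguments (assumed picklable) so the
            # class can later be re-instantiated from the checkpoint alone
            self._init_pickle = pickle.dumps((args, kwargs))

        def save(self, path):
            torch.save(dict(
                init_pickle = self._init_pickle,
                state_dict = self.state_dict()
            ), path)

        @classmethod
        def init_and_load(cls, path, strict = True):
            # rebuild the module from its recorded init args, then load weights
            pkg = torch.load(path, weights_only = False)
            args, kwargs = pickle.loads(pkg['init_pickle'])
            model = cls(*args, **kwargs)
            model.load_state_dict(pkg['state_dict'], strict = strict)
            return model

        klass.__init__ = __init__
        klass.save = save
        klass.init_and_load = init_and_load
        return klass

    return decorator
```

Writing it as `@save_load()` (a decorator factory rather than a bare decorator) leaves room for configuration arguments later without changing call sites.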
@@ -1,6 +1,6 @@
 [project]
 name = "metacontroller-pytorch"
-version = "0.0.23"
+version = "0.0.24"
 description = "Transformer Metacontroller"
 authors = [
     { name = "Phil Wang", email = "lucidrains@gmail.com" }
@@ -29,9 +29,9 @@ dependencies = [
     "einx>=0.3.0",
     "einops>=0.8.1",
     "loguru",
-    "memmap-replay-buffer>=0.0.1",
+    "memmap-replay-buffer>=0.0.23",
     "torch>=2.5",
-    "torch-einops-utils>=0.0.7",
+    "torch-einops-utils>=0.0.16",
     "x-evolution>=0.1.23",
     "x-mlps-pytorch",
     "x-transformers"
@@ -1,6 +1,8 @@
 import pytest
 param = pytest.mark.parametrize
 
+from pathlib import Path
+
 import torch
 from metacontroller.metacontroller import Transformer, MetaController
 
@@ -68,3 +70,16 @@ def test_metacontroller(
 
     model.meta_controller = meta_controller
     model.evolve(1, lambda _: 1., noise_population_size = 2)
+
+    # saving and loading
+
+    meta_controller.save('./meta_controller.pt')
+
+    rehydrated_meta_controller = MetaController.init_and_load('./meta_controller.pt')
+
+    model.save('./trained.pt')
+
+    rehydrated_model = Transformer.init_and_load('./trained.pt', strict = False)
+
+    Path('./meta_controller.pt').unlink()
+    Path('./trained.pt').unlink()
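One detail worth noting in the new round-trip test: `MetaController` is reloaded strictly, while `Transformer.init_and_load` passes `strict = False`. A plausible reason is that the test swaps a differently configured `meta_controller` into the model before saving, so the checkpoint's keys for that submodule need not match what a freshly constructed `Transformer` expects.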
@@ -3,7 +3,7 @@
 # "fire",
 # "gymnasium",
 # "gymnasium[other]",
-# "memmap-replay-buffer>=0.0.10",
+# "memmap-replay-buffer>=0.0.12",
 # "metacontroller-pytorch",
 # "minigrid",
 # "tqdm"