hippoformer-0.0.1-py3-none-any.whl

hippoformer/__init__.py (empty file, nothing to show)
hippoformer/hippoformer.py
@@ -0,0 +1,116 @@
+ from __future__ import annotations
+
+ import torch
+ from torch import nn, Tensor, stack, einsum
+ import torch.nn.functional as F
+ from torch.nn import Module
+ from torch.jit import ScriptModule, script_method
+
+ from einops import repeat, rearrange
+ from einops.layers.torch import Rearrange
+
+ from x_mlps_pytorch import create_mlp
+
+ from assoc_scan import AssocScan
+
+ # helpers
+
+ def exists(v):
+     return v is not None
+
+ def default(v, d):
+     return v if exists(v) else d
+
+ def l2norm(t):
+     return F.normalize(t, dim = -1)
+
+ # path integration
+
+ class RNN(ScriptModule):
+     def __init__(
+         self,
+         dim,
+     ):
+         super().__init__()
+         self.init_hidden = nn.Parameter(torch.randn(1, dim) * 1e-2)
+
+     @script_method
+     def forward(
+         self,
+         transitions: Tensor,
+         hidden: Tensor | None = None
+     ) -> Tensor:
+
+         batch, seq_len = transitions.shape[:2]
+
+         if hidden is None:
+             hidden = l2norm(self.init_hidden)
+             hidden = hidden.expand(batch, -1)
+
+         hiddens: list[Tensor] = []
+
+         for i in range(seq_len):
+             transition = transitions[:, i]
+
+             hidden = einsum('b i, b i j -> b j', hidden, transition)
+             hidden = F.relu(hidden)
+             hidden = l2norm(hidden)
+
+             hiddens.append(hidden)
+
+         return stack(hiddens, dim = 1)
+
+ class PathIntegration(Module):
+     def __init__(
+         self,
+         dim_action,
+         dim_structure,
+         mlp_hidden_dim = None,
+         mlp_depth = 2
+     ):
+         # they use the same approach as Ruiqi Gao's 2021 paper
+         super().__init__()
+
+         self.init_structure = nn.Parameter(torch.randn(dim_structure))
+
+         self.to_transitions = create_mlp(
+             default(mlp_hidden_dim, dim_action * 4),
+             dim_in = dim_action,
+             dim_out = dim_structure * dim_structure,
+             depth = mlp_depth
+         )
+
+         self.mlp_out_to_weights = Rearrange('... (i j) -> ... i j', j = dim_structure)
+
+         self.rnn = RNN(dim_structure)
+
+     def forward(
+         self,
+         actions, # (b n d)
+         prev_structural = None # (b n d) | (b d)
+     ):
+         batch = actions.shape[0]
+
+         transitions = self.to_transitions(actions)
+         transitions = self.mlp_out_to_weights(transitions)
+
+         if exists(prev_structural) and prev_structural.ndim == 3:
+             prev_structural = prev_structural[:, -1]
+
+         return self.rnn(transitions, prev_structural)
+
+ # proposed mmTEM
+
+ class mmTEM(Module):
+     def __init__(
+         self,
+         dim
+     ):
+         super().__init__()
+
+
+     def forward(
+         self,
+         data
+     ):
+         raise NotImplementedError
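For orientation, here is a minimal usage sketch of the `PathIntegration` module above. It is not part of the package; the batch size, sequence length, and dimensions are arbitrary choices for illustration, and the expected shapes follow the `(b n d)` comments in `forward`.

```python
# minimal sketch: running PathIntegration on random actions
# (all sizes below are hypothetical, chosen only for illustration)

import torch
from hippoformer.hippoformer import PathIntegration

path_integration = PathIntegration(dim_action = 8, dim_structure = 32)

actions = torch.randn(2, 10, 8)               # (batch, seq, dim_action)

structural = path_integration(actions)        # one structural code per timestep
assert structural.shape == (2, 10, 32)

# a following segment can continue from the last structural code of the previous one
next_actions = torch.randn(2, 5, 8)
next_structural = path_integration(next_actions, prev_structural = structural)
assert next_structural.shape == (2, 5, 32)
```

Per the `RNN` module, each step applies the action-conditioned transition matrix to the previous code, then a ReLU and l2 normalization.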
hippoformer-0.0.1.dist-info/METADATA
@@ -0,0 +1,65 @@
+ Metadata-Version: 2.4
+ Name: hippoformer
+ Version: 0.0.1
+ Summary: hippoformer
+ Project-URL: Homepage, https://pypi.org/project/hippoformer/
+ Project-URL: Repository, https://github.com/lucidrains/hippoformer
+ Author-email: Phil Wang <lucidrains@gmail.com>
+ License: MIT License
+
+ Copyright (c) 2025 Phil Wang
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+ License-File: LICENSE
+ Keywords: artificial intelligence,deep learning,hippocampus,memory
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Requires-Python: >=3.9
+ Requires-Dist: assoc-scan
+ Requires-Dist: einops>=0.8.1
+ Requires-Dist: torch>=2.4
+ Requires-Dist: x-mlps-pytorch
+ Provides-Extra: examples
+ Provides-Extra: test
+ Requires-Dist: pytest; extra == 'test'
+ Description-Content-Type: text/markdown
+
+ <img src="./hippoformer-fig6.png" width="400px"></img>
+
+ ## Hippoformer (wip)
+
+ Implementation of [Hippoformer](https://openreview.net/forum?id=hxwV5EubAw), Integrating Hippocampus-inspired Spatial Memory with Transformers
+
+ [Temporary Discord](https://discord.gg/MkACrrkrYR)
+
+ ## Citations
+
+ ```bibtex
+ @inproceedings{anonymous2025hippoformer,
+     title = {Hippoformer: Integrating Hippocampus-inspired Spatial Memory with Transformers},
+     author = {Anonymous},
+     booktitle = {Submitted to The Fourteenth International Conference on Learning Representations},
+     year = {2025},
+     url = {https://openreview.net/forum?id=hxwV5EubAw},
+     note = {under review}
+ }
+ ```
hippoformer-0.0.1.dist-info/RECORD
@@ -0,0 +1,6 @@
+ hippoformer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hippoformer/hippoformer.py,sha256=6tA4ZWYKbzclpeTUhJtr2OguVOyyAGFxuLf9bfnfO_M,2682
+ hippoformer-0.0.1.dist-info/METADATA,sha256=4hnfh1oIIlcGsIQ7qD7fZHWfM5ltnHhATAPcN-4vkxQ,2773
+ hippoformer-0.0.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ hippoformer-0.0.1.dist-info/licenses/LICENSE,sha256=1yCiA9b5nhslTavxPjsQAO-wpOnwJR9-l8LTVi7GJuk,1066
+ hippoformer-0.0.1.dist-info/RECORD,,
hippoformer-0.0.1.dist-info/WHEEL
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: hatchling 1.27.0
+ Root-Is-Purelib: true
+ Tag: py3-none-any
hippoformer-0.0.1.dist-info/licenses/LICENSE
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 Phil Wang
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.