PoPE-pytorch 0.0.6 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
PoPE_pytorch/PoPE.py ADDED
@@ -0,0 +1,146 @@
+ from __future__ import annotations
+ from collections import namedtuple
+ from math import pi
+
+ import torch
+ from torch import arange, cat, stack, is_tensor, Tensor
+ from torch.nn import Module, Parameter
+ from torch.amp import autocast
+
+ import torch.nn.functional as F
+
+ from einops import einsum, rearrange
+
+ from torch_einops_utils import slice_right_at_dim
+
+ # constants
+
+ PolarEmbedReturn = namedtuple('PolarEmbedReturn', ('freqs', 'bias'))
+
+ # helper functions
+
+ def exists(v):
+     return v is not None
+
+ def default(v, d):
+     return v if exists(v) else d
+
+ # applying pope to qk
+
+ @autocast('cuda', enabled = False)
+ def apply_pope_to_qk(
+     pope: PolarEmbedReturn,
+     q, k,
+     to_magnitude = F.softplus,
+     return_complex = False
+ ):
+     freqs, bias = pope
+
+     q_len, k_len, qk_dim, rotate_dim = q.shape[-2], k.shape[-2], q.shape[-1], freqs.shape[-1]
+
+     assert q_len <= k_len and rotate_dim <= qk_dim
+
+     is_partial_rotate = rotate_dim < qk_dim
+
+     if is_partial_rotate:
+         q, q_rest = q[..., :rotate_dim], q[..., rotate_dim:]
+         k, k_rest = k[..., :rotate_dim], k[..., rotate_dim:]
+
+         if return_complex:
+             q_rest = torch.polar(q_rest, torch.zeros_like(q_rest))
+             k_rest = torch.polar(k_rest, torch.zeros_like(k_rest))
+
+     if freqs.ndim == 3:
+         freqs = rearrange(freqs, 'b n d -> b 1 n d')
+
+     freqs_with_bias = freqs + rearrange(bias, 'h d -> h 1 d')
+
+     # convert q and k to polar magnitudes with activation
+
+     q, k = to_magnitude(q), to_magnitude(k)
+
+     # apply rotations
+
+     freqs = slice_right_at_dim(freqs, q_len, dim = -2)
+
+     if return_complex:
+         q = torch.polar(q, freqs)
+     else:
+         qcos, qsin = freqs.cos(), freqs.sin()
+         q = rearrange([q * qcos, q * qsin], 'two ... d -> ... (d two)')
+
+     # handle inference
+
+     if return_complex:
+         k = torch.polar(k, freqs_with_bias)
+     else:
+         kcos, ksin = freqs_with_bias.cos(), freqs_with_bias.sin()
+         k = rearrange([k * kcos, k * ksin], 'two ... d -> ... (d two)')
+
+     # concat
+
+     if is_partial_rotate:
+         q = cat((q, q_rest), dim = -1)
+         k = cat((k, k_rest), dim = -1)
+
+     return q, k
+
+ # main class
+
+ class PoPE(Module):
+     def __init__(
+         self,
+         dim,
+         *,
+         heads,
+         theta = 10000,
+         bias_uniform_init = False
+     ):
+         super().__init__()
+
+         # freqs
+
+         inv_freqs = theta ** -(arange(dim).float() / dim)
+         self.register_buffer('inv_freqs', inv_freqs)
+
+         # the learned bias on the keys
+
+         self.bias = Parameter(torch.zeros(heads, dim))
+
+         if bias_uniform_init:
+             torch.nn.init.uniform_(self.bias, -2. * pi, 0.)  # in-place init on a Parameter has to bypass autograd
+
+         # convenience
+
+         self.apply_pope_to_qk = staticmethod(apply_pope_to_qk)
+
+     @property
+     def device(self):
+         return self.inv_freqs.device
+
+     @autocast('cuda', enabled = False)
+     def forward(
+         self,
+         pos_or_seq_len: Tensor | int,
+         offset = 0
+     ):
+
+         # get positions depending on input
+
+         if is_tensor(pos_or_seq_len):
+             pos = pos_or_seq_len
+         else:
+             seq_len = pos_or_seq_len
+             pos = arange(seq_len, device = self.device, dtype = self.inv_freqs.dtype)
+
+         pos = pos + offset
+
+         # freqs
+
+         freqs = einsum(pos, self.inv_freqs, '... i, j -> ... i j')
+
+         # the bias, with clamping
+
+         bias = self.bias.clamp(-2. * pi, 0.)
+
+         return PolarEmbedReturn(freqs, bias)
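
To make the inference path of `apply_pope_to_qk` concrete, here is a minimal sketch of a single-step decode against a cached key sequence; it is not shipped in the package, and the batch size, head count, cache length, and the `F.scaled_dot_product_attention` call are illustrative assumptions. It shows that only the rightmost `q_len` frequencies rotate the query, that every key position receives the learned per-head bias, and that the real-valued output interleaves cosine and sine parts, doubling the last dimension.

```python
import torch
import torch.nn.functional as F

from PoPE_pytorch import PoPE, apply_pope_to_qk

pope = PoPE(64, heads = 8)

cache_len = 512

q = torch.randn(1, 8, 1, 64)              # only the newest query token
k = torch.randn(1, 8, cache_len + 1, 64)  # cached keys plus the new one
v = torch.randn(1, 8, cache_len + 1, 64)

# frequencies for every key position, plus the learned per-head key bias
pos_embed = pope(cache_len + 1)           # freqs: (513, 64), bias: (8, 64)

# q is rotated with the rightmost q_len frequencies, k with the bias-shifted ones
rq, rk = apply_pope_to_qk(pos_embed, q, k)

# cos / sin parts are interleaved, so the last dimension doubles
assert rq.shape == (1, 8, 1, 128)
assert rk.shape == (1, 8, cache_len + 1, 128)

out = F.scaled_dot_product_attention(rq, rk, v)   # (1, 8, 1, 64)
```

With `return_complex = True` the same magnitudes and angles come back as complex tensors instead, leaving the feature dimension unchanged.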
PoPE_pytorch/__init__.py ADDED
@@ -0,0 +1,4 @@
+ from PoPE_pytorch.PoPE import (
+     PoPE,
+     apply_pope_to_qk
+ )
pope_pytorch-0.0.6.dist-info/METADATA ADDED
@@ -0,0 +1,97 @@
+ Metadata-Version: 2.4
+ Name: PoPE-pytorch
+ Version: 0.0.6
+ Summary: PoPE
+ Project-URL: Homepage, https://pypi.org/project/PoPE-pytorch/
+ Project-URL: Repository, https://github.com/lucidrains/PoPE-pytorch
+ Author-email: Phil Wang <lucidrains@gmail.com>
+ License: MIT License
+
+ Copyright (c) 2026 Phil Wang
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+ License-File: LICENSE
+ Keywords: artificial intelligence,deep learning,positional embedding
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Requires-Python: >=3.10
+ Requires-Dist: einops>=0.8.1
+ Requires-Dist: torch-einops-utils>=0.0.14
+ Requires-Dist: torch>=2.5
+ Provides-Extra: examples
+ Provides-Extra: test
+ Requires-Dist: pytest; extra == 'test'
+ Description-Content-Type: text/markdown
+
+ <img src="./pope.png" width="400px"></img>
+
+ ## PoPE-pytorch (wip)
+
+ Efficient implementation of (and explorations into) [polar coordinate positional embedding (PoPE)](https://arxiv.org/abs/2509.10534), from [Gopalakrishnan](https://agopal42.github.io/) et al., under Schmidhuber
+
+ ## Install
+
+ ```shell
+ $ pip install PoPE-pytorch
+ ```
+
+ ## Usage
+
+ ```python
+ import torch
+ from PoPE_pytorch import PoPE
+
+ # define pope
+
+ pope = PoPE(64, heads = 8)
+
+ # pass in sequence length
+
+ pos_embed = pope(1024)
+
+ # queries and keys in attention
+
+ q = torch.randn(1, 8, 1024, 64)
+ k = torch.randn(1, 8, 1024, 64)
+
+ # training
+
+ rotated_q, rotated_k = pope.apply_pope_to_qk(pos_embed, q, k)
+
+ # inference
+
+ rotated_q, rotated_k = pope.apply_pope_to_qk(pos_embed, q[..., -1:, :], k)
+ ```
+
+ ## Citations
+
+ ```bibtex
+ @misc{gopalakrishnan2025decouplingwhatwherepolar,
+     title = {Decoupling the "What" and "Where" With Polar Coordinate Positional Embeddings},
+     author = {Anand Gopalakrishnan and Robert Csordás and Jürgen Schmidhuber and Michael C. Mozer},
+     year = {2025},
+     eprint = {2509.10534},
+     archivePrefix = {arXiv},
+     primaryClass = {cs.LG},
+     url = {https://arxiv.org/abs/2509.10534},
+ }
+ ```
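
The usage above rotates the full head dimension. When the PoPE `dim` is smaller than the head dimension, `apply_pope_to_qk` rotates only the leading `dim` features and passes the remainder through untouched; a brief sketch of that partial-rotation case feeding standard causal attention follows (the shapes and the `F.scaled_dot_product_attention` call are assumptions for illustration, not part of the package).

```python
import torch
import torch.nn.functional as F

from PoPE_pytorch import PoPE, apply_pope_to_qk

# rotate only the first 32 of 64 head features; the rest pass through unrotated
pope = PoPE(32, heads = 8)

q = torch.randn(2, 8, 1024, 64)
k = torch.randn(2, 8, 1024, 64)
v = torch.randn(2, 8, 1024, 64)

pos_embed = pope(1024)

rq, rk = apply_pope_to_qk(pos_embed, q, k)

# rotated features double (cos / sin interleaved), unrotated ones are kept:
# 32 * 2 + (64 - 32) = 96
assert rq.shape == (2, 8, 1024, 96)
assert rk.shape == (2, 8, 1024, 96)

out = F.scaled_dot_product_attention(rq, rk, v, is_causal = True)  # (2, 8, 1024, 64)
```

Note that the magnitudes come from `softplus` inside `apply_pope_to_qk` (the `to_magnitude` argument), so `q` and `k` are passed in as raw projections.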
pope_pytorch-0.0.6.dist-info/RECORD ADDED
@@ -0,0 +1,6 @@
+ PoPE_pytorch/PoPE.py,sha256=2QeXhQM3-7SNUJ8EID1S3lpP-yKXQsWP0T2mkIGJaiU,3428
+ PoPE_pytorch/__init__.py,sha256=mVk9Y0nAmhMxlEQjZKY0OLmTKsz9oyMSqF7n5LoWcbg,65
+ pope_pytorch-0.0.6.dist-info/METADATA,sha256=it21ePxjcQQJvsQkxOkAsSsaPMGk-P7Mo6dwZHdoWSg,3270
+ pope_pytorch-0.0.6.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ pope_pytorch-0.0.6.dist-info/licenses/LICENSE,sha256=e6AOF7Z8EFdK3IdcL0x0fLw4cY7Q0d0kNR0o0TmBewM,1066
+ pope_pytorch-0.0.6.dist-info/RECORD,,
pope_pytorch-0.0.6.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: hatchling 1.28.0
+ Root-Is-Purelib: true
+ Tag: py3-none-any
pope_pytorch-0.0.6.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2026 Phil Wang
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.