titans-pytorch: titans_pytorch-0.1.9-py3-none-any.whl → titans_pytorch-0.1.10-py3-none-any.whl

titans_pytorch/mac_transformer.py

@@ -593,11 +593,9 @@ class MemoryAsContextTransformer(Module):
         # apply axial positional embedding
         # so intra and inter segment can be more easily discerned by the network
 
-        neural_mem_windows = ceil(seq_len_with_mem / neural_mem_segment_len)
+        pos_emb = self.axial_pos_emb.forward_with_seq_len(seq_len_with_mem, (neural_mem_segment_len,))
 
-        pos_emb = self.axial_pos_emb((neural_mem_windows, neural_mem_segment_len), flatten = True)
-
-        x = x + pos_emb[:seq_len_with_mem]
+        x = x + pos_emb
 
         # prep flex attention
 
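Judging by the removed lines, the new forward_with_seq_len call (its name and signature come straight from the + line above) appears to fold into the library the window arithmetic and trimming that the transformer previously did by hand. A minimal sketch of the equivalent computation, reconstructed from the three removed 0.1.9 lines; ToyAxialPosEmb and pos_emb_for_seq_len are hypothetical stand-ins so the sketch runs, not the axial-positional-embedding library's actual classes or methods:

    from math import ceil

    import torch

    # ToyAxialPosEmb is a made-up stand-in for the library's axial positional
    # embedding: one learned table per axis, broadcast-summed over a
    # (windows, segment_len) grid, optionally flattened like the removed call
    class ToyAxialPosEmb(torch.nn.Module):
        def __init__(self, dim, max_windows = 64, max_segment_len = 64):
            super().__init__()
            self.window_emb = torch.nn.Parameter(torch.randn(max_windows, dim))
            self.seg_emb = torch.nn.Parameter(torch.randn(max_segment_len, dim))

        def forward(self, axial_shape, flatten = False):
            windows, segment_len = axial_shape
            grid = self.window_emb[:windows, None, :] + self.seg_emb[None, :segment_len, :]
            return grid.reshape(-1, grid.shape[-1]) if flatten else grid

    def pos_emb_for_seq_len(axial_pos_emb, seq_len, segment_len):
        # what 0.1.9 did inline at the call site, and what forward_with_seq_len
        # is assumed to now do inside the library
        windows = ceil(seq_len / segment_len)            # round up to whole windows
        pos_emb = axial_pos_emb((windows, segment_len), flatten = True)
        return pos_emb[:seq_len]                         # trim the overhang

    emb = ToyAxialPosEmb(dim = 512)
    print(pos_emb_for_seq_len(emb, seq_len = 100, segment_len = 32).shape)  # torch.Size([100, 512])

Moving the ceil-and-trim bookkeeping behind the embedding's own API removes the duplicated logic from the call site, which is consistent with the axial-positional-embedding requirement being bumped to >=0.3.6 in the METADATA hunk below.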
titans_pytorch-0.1.9.dist-info/METADATA → titans_pytorch-0.1.10.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: titans-pytorch
-Version: 0.1.9
+Version: 0.1.10
 Summary: Titans
 Project-URL: Homepage, https://pypi.org/project/titans-pytorch/
 Project-URL: Repository, https://github.com/lucidrains/titans-pytorch
@@ -35,7 +35,7 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Requires-Python: >=3.9
 Requires-Dist: accelerated-scan>=0.2.0
-Requires-Dist: axial-positional-embedding>=0.3.5
+Requires-Dist: axial-positional-embedding>=0.3.6
 Requires-Dist: einops>=0.8.0
 Requires-Dist: einx>=0.3.0
 Requires-Dist: hyper-connections>=0.1.8
@@ -62,7 +62,7 @@ Unofficial implementation of [Titans](https://arxiv.org/abs/2501.00663) in Pytor
 
 ## Appreciation
 
-- [@sentialx](https://github.com/sentialx) for sharing his early experimental results with me
+- [Eryk](https://github.com/sentialx) for sharing his early experimental results with me, positive for 2 layer MLP
 
 ## Install
 
titans_pytorch-0.1.10.dist-info/RECORD (added)

@@ -0,0 +1,8 @@
+titans_pytorch/__init__.py,sha256=u0tta_KqhOdfzCEDWT9P4_jejJEK2q1XxhsEzB5MnQU,223
+titans_pytorch/associative_scan.py,sha256=Y-iYqmFuG-NoCKu6kgql1mhowXTeJfyawi3eUIXamp0,2650
+titans_pytorch/mac_transformer.py,sha256=zxknstaI_Uz47Y8WvZ3S7geJ-TNdqKV5Rvj0Jlw8njs,19271
+titans_pytorch/titans.py,sha256=gZvYk1j6aBMp0uE6l1a2GH_4ea9W2uXKytJb3CDPTlk,21162
+titans_pytorch-0.1.10.dist-info/METADATA,sha256=o2D4Zau9GLBZmsj2qzq7agWckPnBJhDtIeTj2cMgy7Q,4769
+titans_pytorch-0.1.10.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+titans_pytorch-0.1.10.dist-info/licenses/LICENSE,sha256=1yCiA9b5nhslTavxPjsQAO-wpOnwJR9-l8LTVi7GJuk,1066
+titans_pytorch-0.1.10.dist-info/RECORD,,
titans_pytorch-0.1.9.dist-info/RECORD (removed)

@@ -1,8 +0,0 @@
-titans_pytorch/__init__.py,sha256=u0tta_KqhOdfzCEDWT9P4_jejJEK2q1XxhsEzB5MnQU,223
-titans_pytorch/associative_scan.py,sha256=Y-iYqmFuG-NoCKu6kgql1mhowXTeJfyawi3eUIXamp0,2650
-titans_pytorch/mac_transformer.py,sha256=hAKuyu-dfSD23hZkCJDPf3PmRSWouVr-XLIDkcDp1MU,19364
-titans_pytorch/titans.py,sha256=gZvYk1j6aBMp0uE6l1a2GH_4ea9W2uXKytJb3CDPTlk,21162
-titans_pytorch-0.1.9.dist-info/METADATA,sha256=J-sURfBOBbNykVWYRPE4RXayhVzEE5vcTStAoznuasM,4747
-titans_pytorch-0.1.9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-titans_pytorch-0.1.9.dist-info/licenses/LICENSE,sha256=1yCiA9b5nhslTavxPjsQAO-wpOnwJR9-l8LTVi7GJuk,1066
-titans_pytorch-0.1.9.dist-info/RECORD,,