titans-pytorch 0.3.14__tar.gz → 0.3.15__tar.gz

This diff compares the contents of publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in that registry.
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: titans-pytorch
-Version: 0.3.14
+Version: 0.3.15
 Summary: Titans
 Project-URL: Homepage, https://pypi.org/project/titans-pytorch/
 Project-URL: Repository, https://github.com/lucidrains/titans-pytorch
pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "titans-pytorch"
-version = "0.3.14"
+version = "0.3.15"
 description = "Titans"
 authors = [
     { name = "Phil Wang", email = "lucidrains@gmail.com" }
titans_pytorch/mac_transformer.py

@@ -494,7 +494,6 @@ class MemoryAsContextTransformer(Module):
         sliding_window_attn = False,
         neural_mem_weight_residual = False,
         token_emb: Module | None = None,
-        abs_pos_emb: Module | None = None
     ):
         super().__init__()
 
@@ -503,10 +502,9 @@ class MemoryAsContextTransformer(Module):
 
         self.token_emb = token_emb
 
-        if not exists(abs_pos_emb):
-            abs_pos_emb = ContinuousAxialPositionalEmbedding(dim = dim, num_axial_dims = 2)
+        # absolute positions
 
-        self.abs_pos_emb = abs_pos_emb
+        self.axial_pos_emb = ContinuousAxialPositionalEmbedding(dim = dim, num_axial_dims = 2)
 
         # long term mem tokens
 
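The source change removes the optional abs_pos_emb constructor argument from MemoryAsContextTransformer: callers can no longer inject their own absolute positional embedding, and a ContinuousAxialPositionalEmbedding is now always created internally, under the new attribute name self.axial_pos_emb. Below is a minimal sketch of the user-facing effect when upgrading; the remaining constructor arguments (num_tokens, dim, depth, segment_len, num_persist_mem_tokens, num_longterm_mem_tokens) and the return_loss flag are taken from the project README, not from this diff, so treat them as assumptions.

import torch
from titans_pytorch import MemoryAsContextTransformer

# in 0.3.14 this constructor also accepted abs_pos_emb: Module | None = None;
# as of 0.3.15 that argument is gone and the axial positional embedding is
# always built internally (self.axial_pos_emb), so any caller that passed
# abs_pos_emb = ... must drop that argument when upgrading
transformer = MemoryAsContextTransformer(
    num_tokens = 256,               # vocab size (assumed, from the README example)
    dim = 256,
    depth = 2,
    segment_len = 128,              # attention window size
    num_persist_mem_tokens = 4,
    num_longterm_mem_tokens = 16,
)

token_ids = torch.randint(0, 256, (1, 1023))

loss = transformer(token_ids, return_loss = True)   # autoregressive loss, per README
loss.backward()

A plausible reading of the design choice: an axial embedding with num_axial_dims = 2 factorizes a position into two axes, which fits the fixed segment structure this transformer already imposes on the sequence, so a single built-in embedding replaced the injectable one.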