x-transformers 1.32.12.tar.gz → 1.32.14.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (21)
  1. {x_transformers-1.32.12/x_transformers.egg-info → x_transformers-1.32.14}/PKG-INFO +1 -1
  2. {x_transformers-1.32.12 → x_transformers-1.32.14}/setup.py +1 -1
  3. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers/x_transformers.py +7 -2
  4. {x_transformers-1.32.12 → x_transformers-1.32.14/x_transformers.egg-info}/PKG-INFO +1 -1
  5. {x_transformers-1.32.12 → x_transformers-1.32.14}/LICENSE +0 -0
  6. {x_transformers-1.32.12 → x_transformers-1.32.14}/README.md +0 -0
  7. {x_transformers-1.32.12 → x_transformers-1.32.14}/setup.cfg +0 -0
  8. {x_transformers-1.32.12 → x_transformers-1.32.14}/tests/test_x_transformers.py +0 -0
  9. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers/__init__.py +0 -0
  10. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers/attend.py +0 -0
  11. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers/autoregressive_wrapper.py +0 -0
  12. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers/continuous.py +0 -0
  13. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers/dpo.py +0 -0
  14. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers/multi_input.py +0 -0
  15. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers/nonautoregressive_wrapper.py +0 -0
  16. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers/xl_autoregressive_wrapper.py +0 -0
  17. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers/xval.py +0 -0
  18. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers.egg-info/SOURCES.txt +0 -0
  19. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers.egg-info/dependency_links.txt +0 -0
  20. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers.egg-info/requires.txt +0 -0
  21. {x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers.egg-info/top_level.txt +0 -0
{x_transformers-1.32.12/x_transformers.egg-info → x_transformers-1.32.14}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.32.12
+Version: 1.32.14
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
{x_transformers-1.32.12 → x_transformers-1.32.14}/setup.py

@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '1.32.12',
+  version = '1.32.14',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
{x_transformers-1.32.12 → x_transformers-1.32.14}/x_transformers/x_transformers.py

@@ -1925,7 +1925,8 @@ class TransformerWrapper(Module):
         attn_z_loss_weight = 1e-4,
         average_pool_embed = False,
         use_cls_token = False,
-        squeeze_out_last_dim = False
+        squeeze_out_last_dim = False,
+        token_emb: TokenEmbedding | None = None,
     ):
         super().__init__()
 
@@ -1939,7 +1940,11 @@ class TransformerWrapper(Module):
         self.shift_mem_down = shift_mem_down
 
         self.l2norm_embed = l2norm_embed
-        self.token_emb = TokenEmbedding(emb_dim, num_tokens, l2norm_embed = l2norm_embed)
+
+        if not exists(token_emb):
+            token_emb = TokenEmbedding(emb_dim, num_tokens, l2norm_embed = l2norm_embed)
+
+        self.token_emb = token_emb
 
         no_abs_pos_emb = max_seq_len == 0 or not (use_abs_pos_emb and not attn_layers.disable_abs_pos_emb)
 
{x_transformers-1.32.12 → x_transformers-1.32.14/x_transformers.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.32.12
+Version: 1.32.14
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang