x-transformers 1.35.2__tar.gz → 1.35.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (21)
  1. {x_transformers-1.35.2/x_transformers.egg-info → x_transformers-1.35.3}/PKG-INFO +1 -1
  2. {x_transformers-1.35.2 → x_transformers-1.35.3}/setup.py +1 -1
  3. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers/x_transformers.py +6 -1
  4. {x_transformers-1.35.2 → x_transformers-1.35.3/x_transformers.egg-info}/PKG-INFO +1 -1
  5. {x_transformers-1.35.2 → x_transformers-1.35.3}/LICENSE +0 -0
  6. {x_transformers-1.35.2 → x_transformers-1.35.3}/README.md +0 -0
  7. {x_transformers-1.35.2 → x_transformers-1.35.3}/setup.cfg +0 -0
  8. {x_transformers-1.35.2 → x_transformers-1.35.3}/tests/test_x_transformers.py +0 -0
  9. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers/__init__.py +0 -0
  10. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers/attend.py +0 -0
  11. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers/autoregressive_wrapper.py +0 -0
  12. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers/continuous.py +0 -0
  13. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers/dpo.py +0 -0
  14. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers/multi_input.py +0 -0
  15. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers/nonautoregressive_wrapper.py +0 -0
  16. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers/xl_autoregressive_wrapper.py +0 -0
  17. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers/xval.py +0 -0
  18. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers.egg-info/SOURCES.txt +0 -0
  19. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers.egg-info/dependency_links.txt +0 -0
  20. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers.egg-info/requires.txt +0 -0
  21. {x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers.egg-info/top_level.txt +0 -0
{x_transformers-1.35.2/x_transformers.egg-info → x_transformers-1.35.3}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.35.2
+Version: 1.35.3
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
{x_transformers-1.35.2 → x_transformers-1.35.3}/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '1.35.2',
+  version = '1.35.3',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
{x_transformers-1.35.2 → x_transformers-1.35.3}/x_transformers/x_transformers.py
@@ -920,6 +920,7 @@ class Attention(Module):
         kv_heads = None,
         shared_kv = False,
         value_dim_head = None,
+        dim_out = None,
         tensor_product = False, # https://arxiv.org/abs/2208.06061
         add_zero_kv = False, # same as add_zero_attn in pytorch
         rotary_embed_values = False,
@@ -1057,7 +1058,11 @@ class Attention(Module):
         # attention on attention

         self.attn_on_attn = on_attn
-        self.to_out = nn.Sequential(nn.Linear(out_dim, dim * 2, bias = False), nn.GLU()) if on_attn else nn.Linear(out_dim, dim, bias = False)
+
+        # output dimension by default same as input, but can be overridden
+
+        dim_out = default(dim_out, dim)
+        self.to_out = nn.Sequential(nn.Linear(out_dim, dim_out * 2, bias = False), nn.GLU()) if on_attn else nn.Linear(out_dim, dim_out, bias = False)

         # whether to rotate positions into values, for absolute positions in addition to relative

{x_transformers-1.35.2 → x_transformers-1.35.3/x_transformers.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.35.2
+Version: 1.35.3
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang