x-transformers 1.40.9.tar.gz → 1.40.10.tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (21)
  1. {x_transformers-1.40.9/x_transformers.egg-info → x_transformers-1.40.10}/PKG-INFO +1 -1
  2. {x_transformers-1.40.9 → x_transformers-1.40.10}/setup.py +1 -1
  3. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers/x_transformers.py +1 -1
  4. {x_transformers-1.40.9 → x_transformers-1.40.10/x_transformers.egg-info}/PKG-INFO +1 -1
  5. {x_transformers-1.40.9 → x_transformers-1.40.10}/LICENSE +0 -0
  6. {x_transformers-1.40.9 → x_transformers-1.40.10}/README.md +0 -0
  7. {x_transformers-1.40.9 → x_transformers-1.40.10}/setup.cfg +0 -0
  8. {x_transformers-1.40.9 → x_transformers-1.40.10}/tests/test_x_transformers.py +0 -0
  9. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers/__init__.py +0 -0
  10. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers/attend.py +0 -0
  11. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers/autoregressive_wrapper.py +0 -0
  12. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers/continuous.py +0 -0
  13. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers/dpo.py +0 -0
  14. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers/multi_input.py +0 -0
  15. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers/nonautoregressive_wrapper.py +0 -0
  16. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers/xl_autoregressive_wrapper.py +0 -0
  17. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers/xval.py +0 -0
  18. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers.egg-info/SOURCES.txt +0 -0
  19. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers.egg-info/dependency_links.txt +0 -0
  20. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers.egg-info/requires.txt +0 -0
  21. {x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers.egg-info/top_level.txt +0 -0
{x_transformers-1.40.9/x_transformers.egg-info → x_transformers-1.40.10}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.40.9
+Version: 1.40.10
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
{x_transformers-1.40.9 → x_transformers-1.40.10}/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '1.40.9',
+  version = '1.40.10',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
{x_transformers-1.40.9 → x_transformers-1.40.10}/x_transformers/x_transformers.py
@@ -1019,7 +1019,7 @@ class Attention(Module):
         self.qk_norm_q_scale = self.qk_norm_k_scale = 1
         if qk_norm and qk_norm_dim_scale:
             self.qk_norm_q_scale = nn.Parameter(torch.ones(heads, 1, dim_head))
-            self.qk_norm_k_scale = nn.Parameter(torch.ones(heads, 1, dim_head))
+            self.qk_norm_k_scale = nn.Parameter(torch.ones(kv_heads, 1, dim_head))
 
         assert (not qk_norm) or divisible_by(dim_head, qk_norm_groups), 'dimension per attention head must be divisible by the qk norm groups'
         assert not (qk_norm and (dim_head // qk_norm_groups) <= 2), 'the group dimension may be too small (2 was too small in my tests, but 4 still works, surprisingly)'
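The hunk above is the only functional change in this release: the learned k-norm scale is now sized by kv_heads instead of heads, which matters when grouped-query attention is used (kv_heads < heads), since the parameter is multiplied elementwise against the keys. Below is a minimal sketch of the shape issue, not the library's actual qk-norm code; the tensor shapes and variable names (heads, kv_heads, dim_head) follow the parameters visible in the diff, everything else is illustrative.

# Minimal sketch (assumed grouped-query setting, kv_heads < heads) of why the
# k-norm scale parameter must be shaped by kv_heads rather than heads.
import torch

heads, kv_heads, dim_head, seq = 8, 2, 64, 16

q = torch.randn(1, heads, seq, dim_head)      # queries use all 8 heads
k = torch.randn(1, kv_heads, seq, dim_head)   # keys/values use only 2 heads

q_scale     = torch.ones(heads, 1, dim_head)     # matches q, fine in both versions
k_scale_old = torch.ones(heads, 1, dim_head)     # shape used in 1.40.9
k_scale_new = torch.ones(kv_heads, 1, dim_head)  # shape used in 1.40.10

q * q_scale          # broadcasts: (1, 8, 16, 64) * (8, 1, 64)
k * k_scale_new      # broadcasts: (1, 2, 16, 64) * (2, 1, 64)

try:
    k * k_scale_old  # (1, 2, 16, 64) * (8, 1, 64) -> cannot broadcast
except RuntimeError as err:
    print('1.40.9 shape fails under grouped-query attention:', err)

With the full number of heads (heads == kv_heads) both shapes behave identically, which is why the old parameter shape only surfaces as a problem in the grouped-query configuration.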
{x_transformers-1.40.9 → x_transformers-1.40.10/x_transformers.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.40.9
+Version: 1.40.10
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang