x-transformers 1.37.0__tar.gz → 1.37.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (21)
  1. {x_transformers-1.37.0/x_transformers.egg-info → x_transformers-1.37.2}/PKG-INFO +1 -1
  2. {x_transformers-1.37.0 → x_transformers-1.37.2}/setup.py +1 -1
  3. {x_transformers-1.37.0 → x_transformers-1.37.2}/tests/test_x_transformers.py +1 -1
  4. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers/x_transformers.py +2 -0
  5. {x_transformers-1.37.0 → x_transformers-1.37.2/x_transformers.egg-info}/PKG-INFO +1 -1
  6. {x_transformers-1.37.0 → x_transformers-1.37.2}/LICENSE +0 -0
  7. {x_transformers-1.37.0 → x_transformers-1.37.2}/README.md +0 -0
  8. {x_transformers-1.37.0 → x_transformers-1.37.2}/setup.cfg +0 -0
  9. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers/__init__.py +0 -0
  10. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers/attend.py +0 -0
  11. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers/autoregressive_wrapper.py +0 -0
  12. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers/continuous.py +0 -0
  13. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers/dpo.py +0 -0
  14. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers/multi_input.py +0 -0
  15. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers/nonautoregressive_wrapper.py +0 -0
  16. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers/xl_autoregressive_wrapper.py +0 -0
  17. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers/xval.py +0 -0
  18. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers.egg-info/SOURCES.txt +0 -0
  19. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers.egg-info/dependency_links.txt +0 -0
  20. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers.egg-info/requires.txt +0 -0
  21. {x_transformers-1.37.0 → x_transformers-1.37.2}/x_transformers.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.37.0
+Version: 1.37.2
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '1.37.0',
+  version = '1.37.2',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
@@ -278,4 +278,4 @@ def test_mos():

     model.eval()

-    eval_logits = model(x, recycle_steps = 3)
+    eval_logits = model(x)
@@ -2258,6 +2258,8 @@ class TransformerWrapper(Module):
         # attention layers

         if not self.recycling:
+            assert not exists(recycle_steps) or recycle_steps == 1, 'you did not train with recycling'
+
             # regular

             attended, intermediates = self.attn_layers(x, mask = mask, mems = mems, mem_masks = mem_masks, cache = cache, return_hiddens = True, seq_start_pos = seq_start_pos, **kwargs)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.37.0
+Version: 1.37.2
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
File without changes