x-transformers 1.42.19.tar.gz → 1.42.20.tar.gz

Files changed (22)
  1. {x_transformers-1.42.19/x_transformers.egg-info → x_transformers-1.42.20}/PKG-INFO +1 -1
  2. {x_transformers-1.42.19 → x_transformers-1.42.20}/setup.py +1 -1
  3. {x_transformers-1.42.19 → x_transformers-1.42.20}/tests/test_x_transformers.py +7 -3
  4. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers/attend.py +1 -1
  5. {x_transformers-1.42.19 → x_transformers-1.42.20/x_transformers.egg-info}/PKG-INFO +1 -1
  6. {x_transformers-1.42.19 → x_transformers-1.42.20}/LICENSE +0 -0
  7. {x_transformers-1.42.19 → x_transformers-1.42.20}/README.md +0 -0
  8. {x_transformers-1.42.19 → x_transformers-1.42.20}/setup.cfg +0 -0
  9. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers/__init__.py +0 -0
  10. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers/autoregressive_wrapper.py +0 -0
  11. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers/continuous.py +0 -0
  12. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers/dpo.py +0 -0
  13. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers/multi_input.py +0 -0
  14. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers/neo_mlp.py +0 -0
  15. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers/nonautoregressive_wrapper.py +0 -0
  16. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers/x_transformers.py +0 -0
  17. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers/xl_autoregressive_wrapper.py +0 -0
  18. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers/xval.py +0 -0
  19. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers.egg-info/SOURCES.txt +0 -0
  20. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers.egg-info/dependency_links.txt +0 -0
  21. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers.egg-info/requires.txt +0 -0
  22. {x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers.egg-info/top_level.txt +0 -0
{x_transformers-1.42.19/x_transformers.egg-info → x_transformers-1.42.20}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.42.19
+Version: 1.42.20
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
{x_transformers-1.42.19 → x_transformers-1.42.20}/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '1.42.19',
+  version = '1.42.20',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
{x_transformers-1.42.19 → x_transformers-1.42.20}/tests/test_x_transformers.py
@@ -388,7 +388,8 @@ def test_neo_mlp():
     out = mlp(x)
     assert out.shape == (3, 7)
 
-def test_custom_alibi():
+@pytest.mark.parametrize('flash', (True, False))
+def test_custom_alibi(flash: bool):
 
     model = TransformerWrapper(
         num_tokens = 20_000,
@@ -397,7 +398,8 @@ def test_custom_alibi():
             dim = 512,
             depth = 2,
             heads = 8,
-            alibi_pos_bias = True
+            alibi_pos_bias = True,
+            attn_flash = flash
         )
     )
 
@@ -407,7 +409,8 @@ def test_custom_alibi():
 
     logits = model(x, pos = pos)
 
-def test_custom_alibi_across_heads():
+@pytest.mark.parametrize('flash', (True, False))
+def test_custom_alibi_across_heads(flash: bool):
 
     model = Decoder(
         dim = 512,
@@ -417,6 +420,7 @@ def test_custom_alibi_across_heads():
         rel_pos_kwargs = dict(
             slopes = [1, 1]
         ),
+        attn_flash = flash
     )
 
     x = torch.randn(2, 4, 512)
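
Taken together, the test changes run the custom-ALiBi paths once with the flash attention kernel and once with the exact path. A minimal runnable sketch of what the first parametrized test now covers; the max_seq_len, the input tokens, and the pos values here are illustrative assumptions, since the diff does not show them:

import torch
from x_transformers import TransformerWrapper, Decoder

for flash in (True, False):
    model = TransformerWrapper(
        num_tokens = 20_000,
        max_seq_len = 1024,          # assumed; not shown in the diff
        attn_layers = Decoder(
            dim = 512,
            depth = 2,
            heads = 8,
            alibi_pos_bias = True,
            attn_flash = flash       # exercise both attention kernels
        )
    )

    x = torch.randint(0, 20_000, (2, 4))
    pos = torch.arange(4)            # illustrative custom positions

    # both kernels should accept explicitly supplied positions
    logits = model(x, pos = pos)
    assert logits.shape == (2, 4, 20_000)
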
{x_transformers-1.42.19 → x_transformers-1.42.20}/x_transformers/attend.py
@@ -370,7 +370,7 @@ class Attend(Module):
         # convert from bool to float
 
         if exists(attn_bias):
-            attn_bias = rearrange(attn_bias, 'h i j -> 1 h i j').expand(batch, heads, -1, -1)
+            attn_bias = attn_bias.expand(batch, heads, -1, -1)
 
         # if mask given, the mask would already contain the causal mask from above logic
         # otherwise, if no mask given but still causal, mask out alibi positional bias to a large negative number
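
The attend.py change drops the einops rearrange before broadcasting the ALiBi bias into the flash attention path. A standalone sketch of the shape reasoning, with illustrative names rather than the library's internals: torch.Tensor.expand appends new leading dimensions, so a single expand handles both a shared 3-d (h, i, j) bias and a batch-dependent 4-d (1-or-b, h, i, j) one (as a custom pos per batch plausibly produces), whereas the fixed 'h i j -> 1 h i j' pattern required 3-d input.

import torch
import torch.nn.functional as F

batch, heads, i, j, dim_head = 2, 8, 4, 4, 64

q = torch.randn(batch, heads, i, dim_head)
k = torch.randn(batch, heads, j, dim_head)
v = torch.randn(batch, heads, j, dim_head)

for bias in (
    torch.randn(heads, i, j),         # bias shared across the batch
    torch.randn(1, heads, i, j),      # bias with a singleton batch dim
):
    # expand broadcasts without copying; new leading dims are appended
    expanded = bias.expand(batch, heads, -1, -1)
    assert expanded.shape == (batch, heads, i, j)

    # SDPA takes the bias as an additive attn_mask that must broadcast
    # against (batch, heads, q_len, k_len)
    out = F.scaled_dot_product_attention(q, k, v, attn_mask = expanded)
    assert out.shape == (batch, heads, i, dim_head)
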
{x_transformers-1.42.19 → x_transformers-1.42.20/x_transformers.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.42.19
+Version: 1.42.20
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang