x-transformers 1.26.1__tar.gz → 1.26.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (18)
  1. {x-transformers-1.26.1/x_transformers.egg-info → x-transformers-1.26.3}/PKG-INFO +1 -1
  2. {x-transformers-1.26.1 → x-transformers-1.26.3}/README.md +2 -0
  3. {x-transformers-1.26.1 → x-transformers-1.26.3}/setup.py +1 -1
  4. {x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers/autoregressive_wrapper.py +13 -9
  5. {x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers/x_transformers.py +4 -2
  6. {x-transformers-1.26.1 → x-transformers-1.26.3/x_transformers.egg-info}/PKG-INFO +1 -1
  7. {x-transformers-1.26.1 → x-transformers-1.26.3}/LICENSE +0 -0
  8. {x-transformers-1.26.1 → x-transformers-1.26.3}/setup.cfg +0 -0
  9. {x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers/__init__.py +0 -0
  10. {x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers/attend.py +0 -0
  11. {x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers/continuous.py +0 -0
  12. {x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers/nonautoregressive_wrapper.py +0 -0
  13. {x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers/xl_autoregressive_wrapper.py +0 -0
  14. {x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers/xval.py +0 -0
  15. {x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers.egg-info/SOURCES.txt +0 -0
  16. {x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers.egg-info/dependency_links.txt +0 -0
  17. {x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers.egg-info/requires.txt +0 -0
  18. {x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers.egg-info/top_level.txt +0 -0
{x-transformers-1.26.1/x_transformers.egg-info → x-transformers-1.26.3}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.26.1
+Version: 1.26.3
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
{x-transformers-1.26.1 → x-transformers-1.26.3}/README.md
@@ -941,6 +941,8 @@ More of Eric's experimental results can be found <a href="https://github.com/bob
 
 You can use this type of relative position if you wish to train at smaller sequence lengths and have it generalize to longer ones, for both autoregressive and bidirectional models.
 
+Update: <a href="https://www.kaggle.com/competitions/stanford-ribonanza-rna-folding/discussion/460121">First place RNA folding using dynamic positional bias</a>
+
 ```python
 import torch
 from x_transformers import TransformerWrapper, Decoder
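The linked result builds on the dynamic positional bias feature this README section documents. As a rough sketch of that usage (the `dynamic_pos_bias` keyword is taken from the surrounding README example, of which the diff shows only the first lines):

```python
import torch
from x_transformers import TransformerWrapper, Decoder

# train at a shorter context and extrapolate to longer ones at inference;
# `dynamic_pos_bias = True` enables the MLP-produced relative position bias
model = TransformerWrapper(
    num_tokens = 256,
    max_seq_len = 1024,
    attn_layers = Decoder(
        dim = 512,
        depth = 6,
        heads = 8,
        dynamic_pos_bias = True
    )
)

x = torch.randint(0, 256, (1, 1024))
logits = model(x)   # (1, 1024, 256)
```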
{x-transformers-1.26.1 → x-transformers-1.26.3}/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '1.26.1',
+  version = '1.26.3',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
{x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers/autoregressive_wrapper.py
@@ -238,15 +238,19 @@ class AutoregressiveWrapper(Module):
 
             out = torch.cat((out, sample), dim=-1)
 
-            if exists(eos_token):
-                is_eos_tokens = (out == eos_token)
-
-                if is_eos_tokens.any(dim = -1).all():
-                    # mask out everything after the eos tokens
-                    shifted_is_eos_tokens = F.pad(is_eos_tokens, (1, -1))
-                    mask = shifted_is_eos_tokens.float().cumsum(dim = -1) >= 1
-                    out = out.masked_fill(mask, self.pad_value)
-                    break
+            if not exists(eos_token):
+                continue
+
+            is_eos_tokens = (out == eos_token)
+
+            if is_eos_tokens.any(dim = -1).all():
+                break
+
+        if exists(eos_token):
+            # mask out everything after the eos tokens
+            shifted_is_eos_tokens = F.pad(is_eos_tokens, (1, -1))
+            mask = shifted_is_eos_tokens.float().cumsum(dim = -1) >= 1
+            out = out.masked_fill(mask, self.pad_value)
 
         out = out[:, t:]
 
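The rewrite above defers the pad masking until after the sampling loop ends (whether by exhausting `seq_len` or by every row having produced an eos token); the shift-and-cumsum trick itself is unchanged. A standalone sketch of what that mask does, with made-up token ids:

```python
import torch
import torch.nn.functional as F

pad_value, eos_token = 0, 2

# two generated sequences, each containing an eos token (id 2)
out = torch.tensor([
    [5, 7, 2, 9, 4],
    [6, 2, 3, 2, 8],
])

is_eos_tokens = (out == eos_token)

# shift right by one so the eos token itself survives, then cumsum
# flags every position strictly after the first eos in each row
shifted_is_eos_tokens = F.pad(is_eos_tokens, (1, -1))
mask = shifted_is_eos_tokens.float().cumsum(dim = -1) >= 1
out = out.masked_fill(mask, pad_value)

print(out)
# tensor([[5, 7, 2, 0, 0],
#         [6, 2, 0, 0, 0]])
```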
{x-transformers-1.26.1 → x-transformers-1.26.3}/x_transformers/x_transformers.py
@@ -1559,7 +1559,7 @@ class TransformerWrapper(nn.Module):
         pos = None,
         prepend_embeds = None,
         prepend_mask = None,
-        embed_ids: Dict[str, Tensor] = None,
+        embed_ids: Dict[str, Tensor] = dict(),
         sum_embeds = None,
         return_attn_z_loss = False,
         attn_z_loss_weight = 1e-4,
@@ -1578,7 +1578,9 @@ class TransformerWrapper(nn.Module):
 
         # add additional embeddings
 
-        if exists(self.embeds) and exists(embed_ids):
+        if exists(self.embeds):
+            assert len(embed_ids) == len(self.embeds)
+
             for name, embed_id in embed_ids.items():
                 embed_key = f'{name}_embed'
 
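With `embed_ids` now defaulting to an empty dict, the guard no longer silently skips the block when no ids are passed; instead the assert requires one id tensor per registered extra embedding. A rough usage sketch, assuming the `embed_num_tokens` constructor argument that pairs with this `forward` path (treat that argument name and the `speaker`/`language` keys as illustrative, not verified against this exact release):

```python
import torch
from x_transformers import TransformerWrapper, Decoder

# assumption: `embed_num_tokens` registers the extra embeddings that
# forward(embed_ids = ...) later looks up under the f'{name}_embed' keys
model = TransformerWrapper(
    num_tokens = 20000,
    max_seq_len = 1024,
    embed_num_tokens = dict(
        speaker = 4,    # hypothetical extra id vocabularies
        language = 2,
    ),
    attn_layers = Decoder(dim = 512, depth = 6, heads = 8)
)

x = torch.randint(0, 20000, (1, 1024))

# after this change, every registered embedding must receive ids
logits = model(
    x,
    embed_ids = dict(
        speaker = torch.randint(0, 4, (1, 1024)),
        language = torch.randint(0, 2, (1, 1024)),
    )
)
```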
{x-transformers-1.26.1 → x-transformers-1.26.3/x_transformers.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.26.1
+Version: 1.26.3
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang