x-transformers 1.24.7__py3-none-any.whl → 1.25.0__py3-none-any.whl

This diff compares the contents of publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
x_transformers/__init__.py

@@ -2,6 +2,7 @@ from x_transformers.x_transformers import (
     XTransformer,
     Encoder,
     Decoder,
+    PrefixDecoder,
     CrossAttender,
     Attention,
     TransformerWrapper,
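
The change above only extends the package's top-level exports, so the new class can be imported directly from x_transformers. A minimal sketch (the constructor arguments are illustrative and not taken from the diff; PrefixDecoder accepts the same keyword arguments as the other AttentionLayers subclasses such as Decoder):

    # assumes x-transformers >= 1.25.0 is installed
    from x_transformers import PrefixDecoder

    # illustrative hyperparameters
    prefix_decoder = PrefixDecoder(dim = 512, depth = 6, heads = 8)
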
x_transformers/x_transformers.py

@@ -1331,6 +1331,33 @@ class Decoder(AttentionLayers):
         assert 'causal' not in kwargs, 'cannot set causality on decoder'
         super().__init__(causal = True, **kwargs)
 
+class PrefixDecoder(AttentionLayers):
+    def __init__(self, **kwargs):
+        assert 'causal' not in kwargs, 'cannot set causality on decoder'
+        super().__init__(causal = False, **kwargs)
+
+    def forward(
+        self,
+        x,
+        *args,
+        attn_mask = None,
+        prefix_len = None,
+        **kwargs
+    ):
+        b, n, device = *x.shape[:2], x.device
+        causal_mask = torch.ones((n, n), device = device, dtype = torch.bool).triu(1)
+
+        forwarded_mask = ~causal_mask
+
+        if exists(prefix_len):
+            prefix_mask = torch.arange(n, device = device) < rearrange(prefix_len, 'b -> b 1 1 1')
+            forwarded_mask = forwarded_mask | prefix_mask
+
+        if exists(attn_mask):
+            forwarded_mask = forwarded_mask & attn_mask
+
+        return super().forward(x, *args, attn_mask = forwarded_mask, **kwargs)
+
 class CrossAttender(AttentionLayers):
     def __init__(self, **kwargs):
         super().__init__(cross_attend = True, only_cross = True, **kwargs)
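
The new PrefixDecoder implements a prefix-LM attention pattern: by default every position attends causally, but positions falling inside a per-sample prefix (given by prefix_len, one length per batch element) are visible to the whole sequence, and any user-supplied attn_mask is still intersected with the result. A standalone sketch of the same mask construction, assuming only torch and einops (prefix_causal_mask is a hypothetical helper written here for illustration, not part of the library):

    import torch
    from einops import rearrange

    def prefix_causal_mask(prefix_len, n, device = None):
        # True marks key positions a query may attend to, mirroring the logic added above
        causal_mask = torch.ones((n, n), device = device, dtype = torch.bool).triu(1)
        mask = ~causal_mask  # (n, n) lower-triangular causal mask
        # prefix positions are visible to every query position
        prefix_mask = torch.arange(n, device = device) < rearrange(prefix_len, 'b -> b 1 1 1')
        return mask | prefix_mask  # broadcasts to (b, 1, n, n)

    # batch of 2 sequences of length 6, with prefixes of length 3 and 2
    mask = prefix_causal_mask(torch.tensor([3, 2]), n = 6)
    print(mask.shape)   # torch.Size([2, 1, 6, 6])
    print(mask[0, 0])   # first 3 columns fully True, causal elsewhere

Within the library itself, the equivalent effect is obtained by passing prefix_len to PrefixDecoder.forward, which hands the combined mask to AttentionLayers as attn_mask.
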
x_transformers-1.24.7.dist-info/METADATA → x_transformers-1.25.0.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.24.7
+Version: 1.25.0
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
x_transformers-1.24.7.dist-info/RECORD → x_transformers-1.25.0.dist-info/RECORD

@@ -1,13 +1,13 @@
-x_transformers/__init__.py,sha256=xeZcV-MAaT-l8NOktPa_54WkmXLn4nG5E7CyKQC2iAI,610
+x_transformers/__init__.py,sha256=pXc_U4M3ONUQcpNgZySDIlCF1rp7u4FFmcOYjc4WuXw,629
 x_transformers/attend.py,sha256=MFl_FbgPsm9mziZPTi_s8QbxASETwbGeciMH8sUIwT8,10188
 x_transformers/autoregressive_wrapper.py,sha256=f2u0usjUfAlXwgTz87O8J8XjGTbsbrx2XEP6K2beSNI,8944
 x_transformers/continuous.py,sha256=G8mVTan2-YbzkY3YDCTar1oyHPMSl0p4F6iRz3Nl0Is,5497
 x_transformers/nonautoregressive_wrapper.py,sha256=AQLE4rA_Kh8VNoe9OzpwyeWson34sRkhks4dn4seNjI,10414
-x_transformers/x_transformers.py,sha256=exkQkU_3S5RL0OA9tX4-h8ZWPOdGkoszvLkOStYVj_o,58543
+x_transformers/x_transformers.py,sha256=btyWp8_gOX8jkTqVUWQvuVaE8x6R8IMEkM6_Nczxsc0,59402
 x_transformers/xl_autoregressive_wrapper.py,sha256=DCx4n0_c1tFai4nOqaWVnqx2p9eutsZsDMiMP1ckxNU,4117
 x_transformers/xval.py,sha256=lS9W_E_RskPQAqVZkPiUzbByoW1Ajsw_phsikA3JDAg,8139
-x_transformers-1.24.7.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
-x_transformers-1.24.7.dist-info/METADATA,sha256=qLvJYXptB_smV5-qKfv1vU-bhuz9Uh381fKrKfOA6i0,661
-x_transformers-1.24.7.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
-x_transformers-1.24.7.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
-x_transformers-1.24.7.dist-info/RECORD,,
+x_transformers-1.25.0.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.25.0.dist-info/METADATA,sha256=tIF8EPLHiY2CY0Y0TPFEREgo4VOGDWsv5DLOWi4bnBw,661
+x_transformers-1.25.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+x_transformers-1.25.0.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.25.0.dist-info/RECORD,,