x-transformers 1.24.7__py3-none-any.whl → 1.25.1__py3-none-any.whl
This diff compares the contents of publicly available package versions as released to one of the supported registries, and is provided for informational purposes only.
- x_transformers/__init__.py +1 -0
- x_transformers/x_transformers.py +30 -0
- {x_transformers-1.24.7.dist-info → x_transformers-1.25.1.dist-info}/METADATA +1 -1
- {x_transformers-1.24.7.dist-info → x_transformers-1.25.1.dist-info}/RECORD +7 -7
- {x_transformers-1.24.7.dist-info → x_transformers-1.25.1.dist-info}/LICENSE +0 -0
- {x_transformers-1.24.7.dist-info → x_transformers-1.25.1.dist-info}/WHEEL +0 -0
- {x_transformers-1.24.7.dist-info → x_transformers-1.25.1.dist-info}/top_level.txt +0 -0
x_transformers/__init__.py CHANGED
x_transformers/x_transformers.py CHANGED
```diff
@@ -1331,6 +1331,36 @@ class Decoder(AttentionLayers):
         assert 'causal' not in kwargs, 'cannot set causality on decoder'
         super().__init__(causal = True, **kwargs)
 
+class PrefixDecoder(AttentionLayers):
+    def __init__(self, **kwargs):
+        assert 'causal' not in kwargs, 'cannot set causality on decoder'
+        super().__init__(causal = False, **kwargs)
+
+    def forward(
+        self,
+        x,
+        *args,
+        attn_mask = None,
+        prefix_attn_len = None,
+        **kwargs
+    ):
+        b, n, device = *x.shape[:2], x.device
+        causal_mask = torch.ones((n, n), device = device, dtype = torch.bool).triu(1)
+
+        forwarded_mask = ~causal_mask
+
+        if exists(prefix_attn_len):
+            if isinstance(prefix_attn_len, int):
+                prefix_attn_len = torch.full((b,), prefix_attn_len, device = device)
+
+            prefix_mask = torch.arange(n, device = device) < rearrange(prefix_attn_len, 'b -> b 1 1 1')
+            forwarded_mask = forwarded_mask | prefix_mask
+
+        if exists(attn_mask):
+            forwarded_mask = forwarded_mask & attn_mask
+
+        return super().forward(x, *args, attn_mask = forwarded_mask, **kwargs)
+
 class CrossAttender(AttentionLayers):
     def __init__(self, **kwargs):
         super().__init__(cross_attend = True, only_cross = True, **kwargs)
```
{x_transformers-1.24.7.dist-info → x_transformers-1.25.1.dist-info}/RECORD CHANGED

```diff
@@ -1,13 +1,13 @@
-x_transformers/__init__.py,sha256=…
+x_transformers/__init__.py,sha256=pXc_U4M3ONUQcpNgZySDIlCF1rp7u4FFmcOYjc4WuXw,629
 x_transformers/attend.py,sha256=MFl_FbgPsm9mziZPTi_s8QbxASETwbGeciMH8sUIwT8,10188
 x_transformers/autoregressive_wrapper.py,sha256=f2u0usjUfAlXwgTz87O8J8XjGTbsbrx2XEP6K2beSNI,8944
 x_transformers/continuous.py,sha256=G8mVTan2-YbzkY3YDCTar1oyHPMSl0p4F6iRz3Nl0Is,5497
 x_transformers/nonautoregressive_wrapper.py,sha256=AQLE4rA_Kh8VNoe9OzpwyeWson34sRkhks4dn4seNjI,10414
-x_transformers/x_transformers.py,sha256=…
+x_transformers/x_transformers.py,sha256=QF4N6xoVcp8PFMWsRYOMMcqRVjJb5_ArzjPEzWCyffE,59552
 x_transformers/xl_autoregressive_wrapper.py,sha256=DCx4n0_c1tFai4nOqaWVnqx2p9eutsZsDMiMP1ckxNU,4117
 x_transformers/xval.py,sha256=lS9W_E_RskPQAqVZkPiUzbByoW1Ajsw_phsikA3JDAg,8139
-x_transformers-1.24.7.dist-info/LICENSE,…
-x_transformers-1.24.7.dist-info/METADATA,…
-x_transformers-1.24.7.dist-info/WHEEL,…
-x_transformers-1.24.7.dist-info/top_level.txt,…
-x_transformers-1.24.7.dist-info/RECORD,…
+x_transformers-1.25.1.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.25.1.dist-info/METADATA,sha256=CnSAhkwS-ZCUYSp0pY1k5PIbIApMw38_75_F1vuof7w,661
+x_transformers-1.25.1.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+x_transformers-1.25.1.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.25.1.dist-info/RECORD,,
```

(The removed entries' hashes are truncated in the source; the 1.24.7 paths are inferred from the rename list above.)
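Each RECORD line has the form `path,sha256=<digest>,<size>`, where the digest is the urlsafe-base64-encoded SHA-256 of the file with trailing `=` padding stripped (per the wheel spec), and the RECORD entry itself carries no hash or size. A minimal sketch for recomputing an entry against an unpacked wheel; the path argument here is just an example:

```python
import base64
import hashlib
from pathlib import Path

def record_entry(path: str) -> str:
    # RECORD digests are urlsafe base64 of the raw sha256 digest,
    # with the trailing '=' padding stripped
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest())
    return f"{path},sha256={digest.rstrip(b'=').decode()},{len(data)}"

# for an unpacked 1.25.1 wheel, this should reproduce
# the corresponding line in the listing above
print(record_entry('x_transformers/xval.py'))
```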
{x_transformers-1.24.7.dist-info → x_transformers-1.25.1.dist-info}/LICENSE: file without changes
{x_transformers-1.24.7.dist-info → x_transformers-1.25.1.dist-info}/WHEEL: file without changes
{x_transformers-1.24.7.dist-info → x_transformers-1.25.1.dist-info}/top_level.txt: file without changes