x-transformers 1.42.16__py3-none-any.whl → 1.42.17__py3-none-any.whl
- x_transformers/x_transformers.py +3 -1
- {x_transformers-1.42.16.dist-info → x_transformers-1.42.17.dist-info}/METADATA +1 -1
- {x_transformers-1.42.16.dist-info → x_transformers-1.42.17.dist-info}/RECORD +6 -6
- {x_transformers-1.42.16.dist-info → x_transformers-1.42.17.dist-info}/LICENSE +0 -0
- {x_transformers-1.42.16.dist-info → x_transformers-1.42.17.dist-info}/WHEEL +0 -0
- {x_transformers-1.42.16.dist-info → x_transformers-1.42.17.dist-info}/top_level.txt +0 -0
x_transformers/x_transformers.py
CHANGED
@@ -1428,13 +1428,15 @@ class Attention(Module):
             else:
                 attn_bias = rel_pos(i, j)

-            attn_bias = pad_at_dim(attn_bias, (num_mem_kv, 0))
+            attn_bias = pad_at_dim(attn_bias, (num_mem_kv, 0)) # handle memory key / values

         # prepare data dependent alibi from forgetting transformers paper, if needed

         if exists(self.data_dependent_alibi):
             attn_bias = self.data_dependent_alibi(x)

+            attn_bias = pad_at_dim(attn_bias, (num_mem_kv, 0))
+
         # attention is all we need

         out, intermediates = self.attend(
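The functional change is the new pad_at_dim call after the data dependent alibi branch: when the attention layer carries num_mem_kv learned memory key/values, those extra slots are prepended along the key dimension, so any per-position attention bias has to be left-padded by the same amount to keep shapes aligned. The relative-position bias above already received this treatment; 1.42.17 extends it to the data dependent alibi bias. Below is a minimal, self-contained sketch of what a pad_at_dim-style helper does; the helper name and call come from the diff, but this standalone definition and the toy shapes are illustrative assumptions, not the library's exact code.

import torch
import torch.nn.functional as F

def pad_at_dim(t, pad, dim = -1, value = 0.):
    # illustrative re-implementation: pad = (left, right) amounts for the chosen dim;
    # F.pad lists padding starting from the last dimension, so fill the trailing
    # dims with (0, 0) pairs before the requested padding
    dims_from_right = (- dim - 1) if dim < 0 else (t.ndim - dim - 1)
    zeros = (0, 0) * dims_from_right
    return F.pad(t, (*zeros, *pad), value = value)

# toy bias of shape (heads, query_len, key_len), with 4 memory key/value slots
num_mem_kv = 4
attn_bias = torch.randn(8, 16, 16)
attn_bias = pad_at_dim(attn_bias, (num_mem_kv, 0))   # -> (8, 16, 20); zero bias over the memory slots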
{x_transformers-1.42.16.dist-info → x_transformers-1.42.17.dist-info}/RECORD
CHANGED
@@ -6,11 +6,11 @@ x_transformers/dpo.py,sha256=xt4OuOWhU8pN3OKN2LZAaC2NC8iiEnchqqcrPWVqf0o,3521
 x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg,9252
 x_transformers/neo_mlp.py,sha256=XCNnnop9WLarcxap1kGuYc1x8GHvwkZiDRnXOxSl3Po,3452
 x_transformers/nonautoregressive_wrapper.py,sha256=2NU58hYMgn-4Jzg3mie-mXb0XH_dCN7fjlzd3K1rLUY,10510
-x_transformers/x_transformers.py,sha256=
+x_transformers/x_transformers.py,sha256=Wvkw4j_78413LdCnCt_wHgcVFiCbzrC8u4TH2iXhkNU,95181
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=7S00kCuab4tWQa-vf-z-XfzADjVj48MoFIr7VSIvttg,8575
-x_transformers-1.42.
-x_transformers-1.42.
-x_transformers-1.42.
-x_transformers-1.42.
-x_transformers-1.42.
+x_transformers-1.42.17.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.42.17.dist-info/METADATA,sha256=T1MDXNdxqdPkqFpGuJVb7vBhniGCbHefm5C-lhb3LJk,717
+x_transformers-1.42.17.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+x_transformers-1.42.17.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.42.17.dist-info/RECORD,,