x-transformers 2.3.26__py3-none-any.whl → 2.3.27__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- x_transformers/x_transformers.py +2 -2
- {x_transformers-2.3.26.dist-info → x_transformers-2.3.27.dist-info}/METADATA +1 -1
- {x_transformers-2.3.26.dist-info → x_transformers-2.3.27.dist-info}/RECORD +5 -5
- {x_transformers-2.3.26.dist-info → x_transformers-2.3.27.dist-info}/WHEEL +0 -0
- {x_transformers-2.3.26.dist-info → x_transformers-2.3.27.dist-info}/licenses/LICENSE +0 -0
x_transformers/x_transformers.py
CHANGED
@@ -1926,7 +1926,7 @@ class Attention(Module):
 
         out = maybe(self.sublayer_dropout)(out)
 
-        if exists(mask):
+        if exists(mask) and not exists(cache):
             out = einx.where('b n, b n d, -> b n d', mask, out, 0.)
 
         if not return_intermediates:
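Note: a plausible reading of this change (an interpretation, not stated in the diff): during cached decoding, out carries only the newly decoded positions while the padding mask still spans the full sequence, so zeroing masked positions with einx.where would no longer line up shape-wise. A minimal sketch of the shape problem, with illustrative tensor names that are not from the library:

import torch

b, n, d = 2, 8, 4
mask = torch.ones(b, n, dtype = torch.bool)      # padding mask over the full sequence
out_full = torch.randn(b, n, d)                  # no cache: mask and out agree on n
out_step = torch.randn(b, 1, d)                  # with a kv cache: only the new position survives

out_full = out_full.masked_fill(~mask.unsqueeze(-1), 0.)   # fine, shapes align
# out_step.masked_fill(~mask.unsqueeze(-1), 0.)            # would fail: n = 8 cannot broadcast to 1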
@@ -2484,7 +2484,7 @@ class AttentionLayers(Module):
         attn_cache = []
 
         if exists(cache):
-            assert self.causal and not
+            assert self.causal and not exists(attn_mask)
 
             prev_cache_length = cache.cache_length
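This pairs with the hunk above: with a cache present, a key-padding mask is now tolerated, and only a full attn_mask is still rejected. A hedged usage sketch, assuming the mask / cache / return_intermediates kwargs that TransformerWrapper exposes (the same ones AutoregressiveWrapper.generate passes through):

import torch
from x_transformers import TransformerWrapper, Decoder

model = TransformerWrapper(
    num_tokens = 256,
    max_seq_len = 128,
    attn_layers = Decoder(dim = 64, depth = 2, heads = 4)
)

ids  = torch.randint(0, 256, (1, 8))
mask = torch.ones(1, 8, dtype = torch.bool)      # key-padding mask

logits, cache = model(ids, mask = mask, return_intermediates = True)

next_id  = logits[:, -1].argmax(dim = -1, keepdim = True)
new_mask = torch.cat((mask, torch.ones(1, 1, dtype = torch.bool)), dim = -1)

# before 2.3.27 this mask-plus-cache combination could trip the assert;
# an explicit attn_mask alongside a cache still would
logits, cache = model(next_id, mask = new_mask, cache = cache, return_intermediates = True)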
{x_transformers-2.3.26.dist-info → x_transformers-2.3.27.dist-info}/RECORD
CHANGED
@@ -8,10 +8,10 @@ x_transformers/entropy_based_tokenizer.py,sha256=F2lO8-v3aLIcVDVNhu7RR-UtRdlmaaY
 x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg,9252
 x_transformers/neo_mlp.py,sha256=XCNnnop9WLarcxap1kGuYc1x8GHvwkZiDRnXOxSl3Po,3452
 x_transformers/nonautoregressive_wrapper.py,sha256=2NU58hYMgn-4Jzg3mie-mXb0XH_dCN7fjlzd3K1rLUY,10510
-x_transformers/x_transformers.py,sha256=
+x_transformers/x_transformers.py,sha256=g7y9U48sirVN6oFq_XxPUDhqKO0U8pdmLYcbT0CoH1E,116223
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=AwwYUm8yDAtKQyKJDIhYMsiLTJ_skh3scUFMjp5sda8,8597
-x_transformers-2.3.26.dist-info/METADATA,sha256=
-x_transformers-2.3.26.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-x_transformers-2.3.26.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
-x_transformers-2.3.26.dist-info/RECORD,,
+x_transformers-2.3.27.dist-info/METADATA,sha256=UNVupcXx-VDnWW5sRWJ4WlxOvUtwDDAy0Lig6s5xG0I,89897
+x_transformers-2.3.27.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+x_transformers-2.3.27.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-2.3.27.dist-info/RECORD,,
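For reference, each RECORD row has the form path,sha256=<urlsafe base64 digest with padding stripped>,<size in bytes>; the RECORD file itself carries no hash, hence its trailing ",,". A small sketch that recomputes an entry, for example to confirm the new x_transformers.py line above:

import base64, hashlib
from pathlib import Path

def record_entry(path):
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b'=')
    return f'{path},sha256={digest.decode()},{len(data)}'

# run inside an unpacked 2.3.27 wheel, this should reproduce:
# x_transformers/x_transformers.py,sha256=g7y9U48sirVN6oFq_XxPUDhqKO0U8pdmLYcbT0CoH1E,116223
print(record_entry('x_transformers/x_transformers.py'))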
{x_transformers-2.3.26.dist-info → x_transformers-2.3.27.dist-info}/WHEEL
File without changes
{x_transformers-2.3.26.dist-info → x_transformers-2.3.27.dist-info}/licenses/LICENSE
File without changes