x-transformers 1.42.23__py3-none-any.whl → 1.42.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- x_transformers/x_transformers.py +1 -1
- {x_transformers-1.42.23.dist-info → x_transformers-1.42.24.dist-info}/METADATA +1 -1
- {x_transformers-1.42.23.dist-info → x_transformers-1.42.24.dist-info}/RECORD +6 -6
- {x_transformers-1.42.23.dist-info → x_transformers-1.42.24.dist-info}/LICENSE +0 -0
- {x_transformers-1.42.23.dist-info → x_transformers-1.42.24.dist-info}/WHEEL +0 -0
- {x_transformers-1.42.23.dist-info → x_transformers-1.42.24.dist-info}/top_level.txt +0 -0
x_transformers/x_transformers.py
CHANGED
@@ -1849,7 +1849,7 @@ class AttentionLayers(Module):
             is_first_self_attn = False
         elif layer_type == 'c':
             cross_attn_learned_value_residual = learned_value_residual_mix and not is_first_cross_attn
-            layer = Attention(dim, heads = heads,
+            layer = Attention(dim, heads = heads, **{**attn_kwargs, **cross_attn_kwargs})
             is_first_cross_attn = False
         elif layer_type == 'f':
             layer = FeedForward(dim, **ff_kwargs)
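The only source change in this release is the line above: when building a cross-attention block (`layer_type == 'c'`), `Attention` is now constructed with the merged keyword arguments `**{**attn_kwargs, **cross_attn_kwargs}`. A minimal sketch of the dict-merge semantics that line relies on, using illustrative kwarg names rather than the library's actual ones:

# Sketch of the {**a, **b} merge used on the changed line. When two dicts are
# unpacked into a new dict literal, keys from the later dict win on conflict,
# so cross-attention-specific settings override the shared attention defaults.
# The kwarg names here are hypothetical, not taken from x-transformers.
attn_kwargs = {'dim_head': 64, 'dropout': 0.1}
cross_attn_kwargs = {'dropout': 0.0}

merged = {**attn_kwargs, **cross_attn_kwargs}
assert merged == {'dim_head': 64, 'dropout': 0.0}

def make_attention(dim, heads = 8, **kwargs):
    # stand-in for Attention(...); only shows how the merged kwargs arrive
    return ('attention', dim, heads, kwargs)

layer = make_attention(512, heads = 8, **merged)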
{x_transformers-1.42.23.dist-info → x_transformers-1.42.24.dist-info}/RECORD
CHANGED
@@ -6,11 +6,11 @@ x_transformers/dpo.py,sha256=xt4OuOWhU8pN3OKN2LZAaC2NC8iiEnchqqcrPWVqf0o,3521
 x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg,9252
 x_transformers/neo_mlp.py,sha256=XCNnnop9WLarcxap1kGuYc1x8GHvwkZiDRnXOxSl3Po,3452
 x_transformers/nonautoregressive_wrapper.py,sha256=2NU58hYMgn-4Jzg3mie-mXb0XH_dCN7fjlzd3K1rLUY,10510
-x_transformers/x_transformers.py,sha256=
+x_transformers/x_transformers.py,sha256=yaC5Jh2sXDRADTjUZHkrJmcJmb4s-aWjrbamVQLAv0s,95928
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=7S00kCuab4tWQa-vf-z-XfzADjVj48MoFIr7VSIvttg,8575
-x_transformers-1.42.
-x_transformers-1.42.
-x_transformers-1.42.
-x_transformers-1.42.
-x_transformers-1.42.
+x_transformers-1.42.24.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.42.24.dist-info/METADATA,sha256=6gq8sWjWzyazL_0CCyfN05PMNxApuNNLu2AeN3sGYkA,739
+x_transformers-1.42.24.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+x_transformers-1.42.24.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.42.24.dist-info/RECORD,,
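For context on the RECORD rows above (this is the standard wheel RECORD format, not anything specific to x-transformers): each row is `path,sha256=<digest>,<size>`, where the digest is the urlsafe-base64 SHA-256 of the file with `=` padding stripped and the size is in bytes. A small sketch that reproduces those two fields for a given file:

# Sketch: reproduce the hash and size fields of a wheel RECORD row.
# Row format: "<path>,sha256=<urlsafe base64 digest, '=' padding stripped>,<bytes>"
import base64, hashlib, os

def record_row(path):
    with open(path, 'rb') as f:
        digest = hashlib.sha256(f.read()).digest()
    b64 = base64.urlsafe_b64encode(digest).rstrip(b'=').decode()
    return f"{path},sha256={b64},{os.path.getsize(path)}"

# Running this over the 1.42.24 wheel contents should match the rows above,
# e.g. the x_transformers/x_transformers.py row ending in ,95928.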
{x_transformers-1.42.23.dist-info → x_transformers-1.42.24.dist-info}/LICENSE
File without changes
{x_transformers-1.42.23.dist-info → x_transformers-1.42.24.dist-info}/WHEEL
File without changes
{x_transformers-1.42.23.dist-info → x_transformers-1.42.24.dist-info}/top_level.txt
File without changes