x-transformers 1.42.14__tar.gz → 1.42.15__tar.gz
Sign up to get free protection for your applications and to get access to all the features.
- {x_transformers-1.42.14/x_transformers.egg-info → x_transformers-1.42.15}/PKG-INFO +1 -1
- {x_transformers-1.42.14 → x_transformers-1.42.15}/setup.py +1 -1
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/x_transformers.py +1 -1
- {x_transformers-1.42.14 → x_transformers-1.42.15/x_transformers.egg-info}/PKG-INFO +1 -1
- {x_transformers-1.42.14 → x_transformers-1.42.15}/LICENSE +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/README.md +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/setup.cfg +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/tests/test_x_transformers.py +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/__init__.py +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/attend.py +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/autoregressive_wrapper.py +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/continuous.py +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/dpo.py +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/multi_input.py +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/neo_mlp.py +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/nonautoregressive_wrapper.py +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/xl_autoregressive_wrapper.py +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/xval.py +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers.egg-info/SOURCES.txt +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers.egg-info/dependency_links.txt +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers.egg-info/requires.txt +0 -0
- {x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers.egg-info/top_level.txt +0 -0
@@ -1821,7 +1821,7 @@ class AttentionLayers(Module):
 1821 1821                 is_first_self_attn = False
 1822 1822             elif layer_type == 'c':
 1823 1823                 cross_attn_learned_value_residual = learned_value_residual_mix and not is_first_cross_attn
 1824      -               layer = Attention(dim, heads = heads, learned_value_residual_mix = [old line truncated in extraction]
      1824 +               layer = Attention(dim, heads = heads, learned_value_residual_mix = cross_attn_learned_value_residual, **{**attn_kwargs, **cross_attn_kwargs})
 1825 1825                 is_first_cross_attn = False
 1826 1826             elif layer_type == 'f':
 1827 1827                 layer = FeedForward(dim, **ff_kwargs)
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
{x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/nonautoregressive_wrapper.py
RENAMED
File without changes
|
{x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers/xl_autoregressive_wrapper.py
RENAMED
File without changes
|
File without changes
|
File without changes
|
{x_transformers-1.42.14 → x_transformers-1.42.15}/x_transformers.egg-info/dependency_links.txt
RENAMED
File without changes
|
File without changes
|
File without changes
|