x-transformers 1.42.15.tar.gz → 1.42.16.tar.gz
- {x_transformers-1.42.15/x_transformers.egg-info → x_transformers-1.42.16}/PKG-INFO +1 -1
- {x_transformers-1.42.15 → x_transformers-1.42.16}/setup.py +1 -1
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers/x_transformers.py +2 -2
- {x_transformers-1.42.15 → x_transformers-1.42.16/x_transformers.egg-info}/PKG-INFO +1 -1
- {x_transformers-1.42.15 → x_transformers-1.42.16}/LICENSE +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/README.md +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/setup.cfg +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/tests/test_x_transformers.py +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers/__init__.py +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers/attend.py +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers/autoregressive_wrapper.py +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers/continuous.py +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers/dpo.py +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers/multi_input.py +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers/neo_mlp.py +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers/nonautoregressive_wrapper.py +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers/xl_autoregressive_wrapper.py +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers/xval.py +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers.egg-info/SOURCES.txt +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers.egg-info/dependency_links.txt +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers.egg-info/requires.txt +0 -0
- {x_transformers-1.42.15 → x_transformers-1.42.16}/x_transformers.egg-info/top_level.txt +0 -0
@@ -1235,9 +1235,9 @@ class Attention(Module):
         # maybe learned value residual mixer per token

         self.to_value_residual_mix = nn.Sequential(
-            nn.Linear(dim, 1),
+            nn.Linear(dim, heads),
             nn.Sigmoid(),
-            Rearrange('b n 1 -> b 1 n 1')
+            Rearrange('b n h -> b h n 1')
         ) if learned_value_residual_mix else always(0.5)

         # attention on attention
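
For context: this release changes the learned value residual mix from a single sigmoid gate per token to one gate per attention head. Below is a minimal sketch of how such a per-head mix gets applied, assuming illustrative names (`first_values` standing in for the first layer's values, ResFormer-style value residual); it is not the library's actual forward code.

import torch
from torch import nn
from einops.layers.torch import Rearrange

batch, seq, dim, heads, dim_head = 2, 16, 512, 8, 64

# after this change: one sigmoid-gated mix coefficient per head, per token
# (previously a single coefficient per token, broadcast across all heads)
to_value_residual_mix = nn.Sequential(
    nn.Linear(dim, heads),
    nn.Sigmoid(),
    Rearrange('b n h -> b h n 1')   # add a trailing unit axis to broadcast over dim_head
)

x = torch.randn(batch, seq, dim)
values = torch.randn(batch, heads, seq, dim_head)        # current layer's values
first_values = torch.randn(batch, heads, seq, dim_head)  # illustrative: values from the first layer

mix = to_value_residual_mix(x)                   # (b, h, n, 1), entries in (0, 1)
mixed = values * mix + first_values * (1. - mix) # per-head convex blend of the two value sets

print(mix.shape)    # torch.Size([2, 8, 16, 1])
print(mixed.shape)  # torch.Size([2, 8, 16, 64])

The +2 -2 in x_transformers.py corresponds to exactly the two lines in the hunk above: the `nn.Linear` output width and the matching `Rearrange` pattern.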