x-transformers 1.42.26__tar.gz → 1.42.27__tar.gz
Sign up to get free protection for your applications and to get access to all the features.
- {x_transformers-1.42.26/x_transformers.egg-info → x_transformers-1.42.27}/PKG-INFO +1 -1
- {x_transformers-1.42.26 → x_transformers-1.42.27}/setup.py +1 -1
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/x_transformers.py +1 -1
- {x_transformers-1.42.26 → x_transformers-1.42.27/x_transformers.egg-info}/PKG-INFO +1 -1
- {x_transformers-1.42.26 → x_transformers-1.42.27}/LICENSE +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/README.md +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/setup.cfg +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/tests/test_x_transformers.py +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/__init__.py +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/attend.py +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/autoregressive_wrapper.py +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/continuous.py +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/dpo.py +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/multi_input.py +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/neo_mlp.py +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/nonautoregressive_wrapper.py +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/xl_autoregressive_wrapper.py +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/xval.py +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers.egg-info/SOURCES.txt +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers.egg-info/dependency_links.txt +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers.egg-info/requires.txt +0 -0
- {x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers.egg-info/top_level.txt +0 -0
@@ -1077,7 +1077,7 @@ class Attention(Module):
         logit_softclamp_value = 50.,
         neutreno_value_residual = False, # Nguyen et al. https://arxiv.org/abs/2312.00751
         neutreno_alpha = 0.4,
-        learned_value_residual_mix =
+        learned_value_residual_mix = True,
         laser = False, # https://arxiv.org/abs/2411.03493v1
         laser_softclamp_value = 15.,
         onnxable = False,

(Note: the removed line's default value was truncated in extraction — it likely read `learned_value_residual_mix = False,`; confirm against the 1.42.26 source. The only change in this release is flipping the `learned_value_residual_mix` default of `Attention.__init__` to `True`.)
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
{x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/nonautoregressive_wrapper.py
RENAMED
File without changes
|
{x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers/xl_autoregressive_wrapper.py
RENAMED
File without changes
|
File without changes
|
File without changes
|
{x_transformers-1.42.26 → x_transformers-1.42.27}/x_transformers.egg-info/dependency_links.txt
RENAMED
File without changes
|
File without changes
|
File without changes
|