x-transformers 1.42.22__py3-none-any.whl → 1.42.24__py3-none-any.whl

--- x_transformers/x_transformers.py
+++ x_transformers/x_transformers.py
@@ -1079,6 +1079,7 @@ class Attention(Module):
         neutreno_alpha = 0.4,
         learned_value_residual_mix = False,
         laser = False, # https://arxiv.org/abs/2411.03493v1
+        laser_softclamp_value = 15.,
         onnxable = False,
         attend_sdp_kwargs: dict = dict(
             enable_flash = True,
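
The new `laser_softclamp_value` knob should be reachable from the wrapper level through the usual `attn_`-prefixed kwarg forwarding in `AttentionLayers`; a hypothetical usage sketch under that assumption (model sizes are illustrative):

    import torch
    from x_transformers import TransformerWrapper, Decoder

    # hypothetical: `attn_laser` / `attn_laser_softclamp_value` are assumed to be
    # routed to Attention via the attn_* kwarg prefixing in AttentionLayers
    model = TransformerWrapper(
        num_tokens = 20000,
        max_seq_len = 1024,
        attn_layers = Decoder(
            dim = 512,
            depth = 6,
            heads = 8,
            attn_laser = True,
            attn_laser_softclamp_value = 15.
        )
    )

    x = torch.randint(0, 20000, (1, 1024))
    logits = model(x)  # (1, 1024, 20000)
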
@@ -1119,9 +1120,9 @@ class Attention(Module):
         self.to_v = LinearNoBias(dim_kv, v_dim) if not shared_kv else None
 
         # enhancing gradients to attention through exponentiated values
-        # todo - compare it to `attn = attn * large_value + attn.detach() * (1. - large_value)`
 
         self.laser = laser
+        self.laser_softclamp_value = laser_softclamp_value
 
         # relations projection from tp-attention
 
@@ -1449,8 +1450,7 @@ class Attention(Module):
                 attn_bias = pad_at_dim(attn_bias, (num_mem_kv, 0))
 
         if self.laser:
-            values_max = v.amax(dim = -2, keepdim = True).detach() # numerical stability
-            v = v - values_max
+            v = softclamp(v, self.laser_softclamp_value)
             v = v.exp()
 
         # attention is all we need
@@ -1465,7 +1465,7 @@ class Attention(Module):
         # laser
 
         if self.laser:
-            out = log(out) + values_max
+            out = log(out)
 
         # store the values for resformer or Neutreno
 
@@ -1849,7 +1849,7 @@ class AttentionLayers(Module):
                 is_first_self_attn = False
             elif layer_type == 'c':
                 cross_attn_learned_value_residual = learned_value_residual_mix and not is_first_cross_attn
-                layer = Attention(dim, heads = heads, learned_value_residual_mix = cross_attn_learned_value_residual, **{**attn_kwargs, **cross_attn_kwargs})
+                layer = Attention(dim, heads = heads, **{**attn_kwargs, **cross_attn_kwargs})
                 is_first_cross_attn = False
             elif layer_type == 'f':
                 layer = FeedForward(dim, **ff_kwargs)
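
Net effect of the laser hunks above: instead of subtracting the detached per-row value maximum and adding it back after the log, the values are soft-clamped before exponentiation, so `v.exp()` is bounded with no extra bookkeeping. A minimal sketch of the new value path, assuming `softclamp` is the tanh-based clamp `(t / value).tanh() * value` (tensor shapes below are illustrative):

    import torch

    def softclamp(t, value):
        # smoothly limit t to the range (-value, value)
        return (t / value).tanh() * value

    laser_softclamp_value = 15.

    attn = torch.softmax(torch.randn(2, 8, 128, 128), dim = -1)  # (batch, heads, q_len, k_len)
    v = torch.randn(2, 8, 128, 64)                               # (batch, heads, k_len, dim_head)

    v = softclamp(v, laser_softclamp_value).exp()   # bounded, so no max-subtraction needed
    out = (attn @ v).log()                          # log of attention-weighted exponentiated values
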
--- x_transformers-1.42.22.dist-info/METADATA
+++ x_transformers-1.42.24.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.42.22
+Version: 1.42.24
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
--- x_transformers-1.42.22.dist-info/RECORD
+++ x_transformers-1.42.24.dist-info/RECORD
@@ -6,11 +6,11 @@ x_transformers/dpo.py,sha256=xt4OuOWhU8pN3OKN2LZAaC2NC8iiEnchqqcrPWVqf0o,3521
 x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg,9252
 x_transformers/neo_mlp.py,sha256=XCNnnop9WLarcxap1kGuYc1x8GHvwkZiDRnXOxSl3Po,3452
 x_transformers/nonautoregressive_wrapper.py,sha256=2NU58hYMgn-4Jzg3mie-mXb0XH_dCN7fjlzd3K1rLUY,10510
-x_transformers/x_transformers.py,sha256=AtxLfcaVabAKJdJ9xOKVrATDcyjxG-tFXx6lg941WB8,96068
+x_transformers/x_transformers.py,sha256=yaC5Jh2sXDRADTjUZHkrJmcJmb4s-aWjrbamVQLAv0s,95928
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=7S00kCuab4tWQa-vf-z-XfzADjVj48MoFIr7VSIvttg,8575
-x_transformers-1.42.22.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
-x_transformers-1.42.22.dist-info/METADATA,sha256=M3wgytCy3B8zW_g2qUesrZAgzhZLBBy-60HjPDpHjNM,739
-x_transformers-1.42.22.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-x_transformers-1.42.22.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
-x_transformers-1.42.22.dist-info/RECORD,,
+x_transformers-1.42.24.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.42.24.dist-info/METADATA,sha256=6gq8sWjWzyazL_0CCyfN05PMNxApuNNLu2AeN3sGYkA,739
+x_transformers-1.42.24.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+x_transformers-1.42.24.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.42.24.dist-info/RECORD,,