x-transformers 1.31.8__py3-none-any.whl → 1.31.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
x_transformers/attend.py CHANGED
@@ -82,7 +82,7 @@ class Attend(Module):
         qk_norm = False,
         flash = False,
         softclamp_logits = False,
-        logit_softclamp_value = 30.,
+        logit_softclamp_value = 50.,
         add_zero_kv = False,
         cope = None,
         onnxable = False,
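This hunk raises the default attention-logit soft-clamp from 30. to 50. (it only takes effect when `softclamp_logits = True`). As a hedged illustration of what the knob controls, here is a minimal sketch of tanh-based soft-capping; the `softclamp` helper below assumes the usual `value * tanh(t / value)` form and is not copied from attend.py:

```python
# minimal sketch of tanh soft-clamping, assuming softclamp(t, v) = v * tanh(t / v)
import torch

def softclamp(t: torch.Tensor, value: float) -> torch.Tensor:
    # near-identity for |t| << value, smoothly saturating toward +/- value
    return torch.tanh(t / value) * value

logits = torch.randn(2, 8, 16, 16) * 100.  # hypothetical attention logits
capped = softclamp(logits, 50.)            # 50. mirrors the new default above
assert capped.abs().max().item() < 50.     # output is strictly bounded by the cap
```

A larger cap saturates later, so the new default distorts moderate logits less.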
@@ -268,6 +268,11 @@ class Attend(Module):
 
         causal = self.causal
 
+        # handle key padding mask
+
+        if exists(mask) and mask.ndim == 2:
+            mask = rearrange(mask, 'b j -> b 1 1 j')
+
         # handle kv cached decoding
 
         if n == 1 and causal:
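The added lines normalize a 2-D key padding mask to a broadcast-ready 4-D shape before the rest of the masking logic runs. A minimal standalone sketch of what that `rearrange` does (the example tensor is made up for illustration):

```python
# sketch: a (batch, key_len) boolean padding mask becomes (batch, 1, 1, key_len)
# so it broadcasts over the head and query-position dimensions of the logits
import torch
from einops import rearrange

mask = torch.tensor([[True, True, False],    # batch item 0: last key is padding
                     [True, False, False]])  # batch item 1: last two keys are padding
mask = rearrange(mask, 'b j -> b 1 1 j')
print(mask.shape)  # torch.Size([2, 1, 1, 3])
```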
x_transformers/x_transformers.py CHANGED
@@ -1272,7 +1272,7 @@ class AttentionLayers(Module):
         shift_tokens = 0,
         sandwich_norm = False,
         softclamp_output = False,
-        softclamp_output_value = 50.,
+        softclamp_output_value = 30.,
         resi_dual = False,
         resi_dual_scale = 1.,
         zero_init_branch_output = False,
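This flips the companion default in `AttentionLayers` from 50. to 30.; together with the attend.py change above, the two caps look like they were transposed before, and now sit at 50. for attention logits and 30. for the final output, matching the soft-capping values popularized by Gemma 2. A hedged usage sketch, assuming the standard x-transformers entry points and that `Decoder` forwards these kwargs to `AttentionLayers`:

```python
# hedged usage sketch: opting in to output soft-clamping on a decoder stack;
# Decoder subclassing AttentionLayers (and forwarding kwargs) is assumed here
from x_transformers import TransformerWrapper, Decoder

model = TransformerWrapper(
    num_tokens = 20000,
    max_seq_len = 1024,
    attn_layers = Decoder(
        dim = 512,
        depth = 6,
        heads = 8,
        softclamp_output = True,       # off by default, per the diff above
        softclamp_output_value = 30.,  # the new default shown above
    )
)
```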
x_transformers-1.31.8.dist-info/METADATA → x_transformers-1.31.10.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.31.8
+Version: 1.31.10
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
x_transformers-1.31.8.dist-info/RECORD → x_transformers-1.31.10.dist-info/RECORD RENAMED
@@ -1,14 +1,14 @@
 x_transformers/__init__.py,sha256=8LQl-dNL6vj8VHRx5LMSOlRDTXQvYOuM21PDXz8WdiI,703
-x_transformers/attend.py,sha256=UWq0bElvJf-_j1N2QbJ2yg28xkWlhnOrLjMJt3If3ao,10956
+x_transformers/attend.py,sha256=oAS0vSy5qH7iTCXzHKfM4k7m_fvuZIR49PStZO8OFJo,11089
 x_transformers/autoregressive_wrapper.py,sha256=uX8Mb0zLsQrZECt_9UGt35g7tC05Rk3nPqO6xp2FFCc,9619
 x_transformers/continuous.py,sha256=WO52n9lFAXv5-SGadi2cApGF8dkouN8QSTEOuC7erj8,6180
 x_transformers/dpo.py,sha256=LjvWgCkqTl-UuehrzQ8nkX5guLr4whYwsmm7SKSwdls,3450
 x_transformers/nonautoregressive_wrapper.py,sha256=ys_p8obc7lTeeodCqvkRKxOXQ1C9T3j5Jwr-JbVgnXk,10432
-x_transformers/x_transformers.py,sha256=B5qRTmvCwUmntxLjntSBLxhVEb32Jgrc9iKTgjb1S74,76030
+x_transformers/x_transformers.py,sha256=qGZ67jBeynItbfgnKd5g2VNxUFSCpx9fy5A8zN6wMeg,76030
 x_transformers/xl_autoregressive_wrapper.py,sha256=DCx4n0_c1tFai4nOqaWVnqx2p9eutsZsDMiMP1ckxNU,4117
 x_transformers/xval.py,sha256=QE1ltYZTR_eGgIHPP2BrMWVWVLqMW-OpDZh87BSmQEg,8563
-x_transformers-1.31.8.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
-x_transformers-1.31.8.dist-info/METADATA,sha256=lOvj-GVFiiUTz4CMxWuUPo6Cw0Sf_DFASNnmFdF8BIQ,661
-x_transformers-1.31.8.dist-info/WHEEL,sha256=y4mX-SOX4fYIkonsAGA5N0Oy-8_gI4FXw5HNI1xqvWg,91
-x_transformers-1.31.8.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
-x_transformers-1.31.8.dist-info/RECORD,,
+x_transformers-1.31.10.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.31.10.dist-info/METADATA,sha256=U15CT3ilR-FAFQHrT5Gc92JWSZxgXIlGO1SLQFBdTAY,662
+x_transformers-1.31.10.dist-info/WHEEL,sha256=y4mX-SOX4fYIkonsAGA5N0Oy-8_gI4FXw5HNI1xqvWg,91
+x_transformers-1.31.10.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.31.10.dist-info/RECORD,,