x-transformers 1.32.0__py3-none-any.whl → 1.32.2__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
x_transformers/attend.py CHANGED
@@ -206,10 +206,6 @@ class Attend(Module):
         if exists(mask):
             row_is_entirely_masked = ~mask.any(dim = -1)
 
-            if row_is_entirely_masked.any():
-                mask = mask.clone()
-                mask[..., 0] = mask[..., 0] | row_is_entirely_masked
-
         # handle alibi positional bias
         # convert from bool to float
 
@@ -245,7 +241,7 @@ class Attend(Module):
 
         # for a row that is entirely masked out, should zero out the output of that row token
 
-        if exists(row_is_entirely_masked):
+        if exists(row_is_entirely_masked) and row_is_entirely_masked.any():
             out = out.masked_fill(row_is_entirely_masked[..., None], 0.)
 
         return out, Intermediates()
@@ -342,10 +338,6 @@ class Attend(Module):
         if exists(mask):
             row_is_entirely_masked = ~mask.any(dim = -1)
 
-            if row_is_entirely_masked.any():
-                mask = mask.clone()
-                mask[..., 0] = mask[..., 0] | row_is_entirely_masked
-
         if exists(self.cope):
             sim = sim + self.cope(q, sim)
 
@@ -369,7 +361,7 @@ class Attend(Module):
             post_softmax_attn = post_softmax_attn
         )
 
-        if exists(row_is_entirely_masked):
+        if exists(row_is_entirely_masked) and row_is_entirely_masked.any():
             out = out.masked_fill(row_is_entirely_masked[..., None], 0.)
 
         return out, intermediates
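
In both attention paths shown above, 1.32.2 drops the workaround that cloned the mask and force-unmasked the first key for query rows with no valid keys, and instead only zeroes those rows' outputs when at least one such row exists. Below is a minimal standalone sketch of the resulting behavior, not the library's code: the helper name attend and the tensor shapes are illustrative assumptions.

# Standalone sketch (not x-transformers itself): shows how a query row whose
# mask has no valid keys ends up with a zeroed output, without mutating the mask.
import torch

def attend(q, k, v, mask):
    # q, k, v: (batch, heads, seq, dim); mask: (batch, heads, q_len, k_len), True = attend
    scale = q.shape[-1] ** -0.5
    sim = torch.einsum('bhid,bhjd->bhij', q, k) * scale

    # a row with no True entries has no valid keys at all
    row_is_entirely_masked = ~mask.any(dim = -1)          # (batch, heads, q_len)

    sim = sim.masked_fill(~mask, -torch.finfo(sim.dtype).max)
    attn = sim.softmax(dim = -1)

    out = torch.einsum('bhij,bhjd->bhid', attn, v)

    # instead of cloning the mask and unmasking the first key (the removed code),
    # zero the output of fully masked rows, and only when any exist
    if row_is_entirely_masked.any():
        out = out.masked_fill(row_is_entirely_masked[..., None], 0.)

    return out

q = torch.randn(1, 2, 4, 8)
k = torch.randn(1, 2, 4, 8)
v = torch.randn(1, 2, 4, 8)
mask = torch.ones(1, 2, 4, 4, dtype = torch.bool)
mask[:, :, 1, :] = False                                  # query row 1 sees no keys

out = attend(q, k, v, mask)
assert torch.all(out[:, :, 1, :] == 0)                    # fully masked row is zeroed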
x_transformers-1.32.0.dist-info/METADATA → x_transformers-1.32.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.32.0
+Version: 1.32.2
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
x_transformers-1.32.0.dist-info/RECORD → x_transformers-1.32.2.dist-info/RECORD CHANGED
@@ -1,5 +1,5 @@
 x_transformers/__init__.py,sha256=5ms39Df8osTUHQ-XTCgP4vSUA4UiNpim9VXJtrLrIvQ,724
-x_transformers/attend.py,sha256=2nN708coYLzvTy937KCKR1iI_uhgfmTnY9GWGsSXjHw,11587
+x_transformers/attend.py,sha256=MI-m91wumBFqFqr_KK9MLgsLk_vPeaVbFMyDr_mWdmY,11349
 x_transformers/autoregressive_wrapper.py,sha256=uX8Mb0zLsQrZECt_9UGt35g7tC05Rk3nPqO6xp2FFCc,9619
 x_transformers/continuous.py,sha256=WO52n9lFAXv5-SGadi2cApGF8dkouN8QSTEOuC7erj8,6180
 x_transformers/dpo.py,sha256=LjvWgCkqTl-UuehrzQ8nkX5guLr4whYwsmm7SKSwdls,3450
@@ -7,8 +7,8 @@ x_transformers/nonautoregressive_wrapper.py,sha256=ys_p8obc7lTeeodCqvkRKxOXQ1C9T
 x_transformers/x_transformers.py,sha256=1QG7zUe89h1R5VDMoKEAkvdRRDkzQ7h6npkqblxxR6g,76312
 x_transformers/xl_autoregressive_wrapper.py,sha256=DCx4n0_c1tFai4nOqaWVnqx2p9eutsZsDMiMP1ckxNU,4117
 x_transformers/xval.py,sha256=QE1ltYZTR_eGgIHPP2BrMWVWVLqMW-OpDZh87BSmQEg,8563
-x_transformers-1.32.0.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
-x_transformers-1.32.0.dist-info/METADATA,sha256=3GkF9cqLxmReELQRflSZpqSXP9tt10A23eiR6wRGzIs,661
-x_transformers-1.32.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
-x_transformers-1.32.0.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
-x_transformers-1.32.0.dist-info/RECORD,,
+x_transformers-1.32.2.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.32.2.dist-info/METADATA,sha256=U0Kh4e7UiL-0hLDZb0P3McdvTnzTeFyVwtoXFffzQ-M,661
+x_transformers-1.32.2.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+x_transformers-1.32.2.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.32.2.dist-info/RECORD,,