x-transformers 2.10.0__py3-none-any.whl → 2.10.1__py3-none-any.whl

This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

x_transformers/attend.py CHANGED
@@ -520,6 +520,7 @@ class Attend(Module):
 
         if self.cog_signed:
             sim_sign = sim.sign()
+            sim = sim.abs()
 
         # masking
 
@@ -552,9 +553,6 @@ class Attend(Module):
 
         pre_softmax_attn = sim
 
-        if self.cog_signed:
-            sim = sim.abs()
-
         attn = self.attn_fn(sim)
 
         attn = attn.type(dtype)
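
The fix above moves the abs() for signed ("cog") attention to the point where the sign is captured, before masking: if the magnitude were taken only after masked positions had been filled with a large negative value, those entries would flip to large positive scores and survive the softmax. Below is a minimal sketch of the sign-preserving attention idea from the paper cited in the README, assuming the captured sign is re-applied to the weights after the softmax; the function name and shapes are illustrative, not x-transformers' API.

```python
import torch
import torch.nn.functional as F

def signed_attention(sim: torch.Tensor, mask: torch.Tensor | None = None) -> torch.Tensor:
    # capture per-entry signs, then work with magnitudes
    # (as of 2.10.1 this happens before masking)
    sim_sign = sim.sign()
    sim = sim.abs()
    # masking after abs, so the -inf fill is not flipped positive
    if mask is not None:
        sim = sim.masked_fill(~mask, float('-inf'))
    attn = F.softmax(sim, dim=-1)
    # restore the signs so attention weights may be negative
    return attn * sim_sign

# usage: sim has shape (batch, heads, query_len, key_len)
sim = torch.randn(1, 2, 4, 4)
attn = signed_attention(sim)
```
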
x_transformers-2.10.0.dist-info/METADATA → x_transformers-2.10.1.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: x-transformers
-Version: 2.10.0
+Version: 2.10.1
 Summary: X-Transformers
 Project-URL: Homepage, https://pypi.org/project/x-transformers/
 Project-URL: Repository, https://github.com/lucidrains/x-transformers
@@ -2587,13 +2587,14 @@ ids_out, num_out, is_number_mask = model.generate(start_ids, start_nums, 17)
 ```
 
 ```bibtex
-@inproceedings{anonymous2025more,
-    title     = {More Expressive Attention with Negative Weights},
-    author    = {Anonymous},
-    booktitle = {Submitted to The Fourteenth International Conference on Learning Representations},
+@misc{lv2025expressiveattentionnegativeweights,
+    title         = {More Expressive Attention with Negative Weights},
+    author        = {Ang Lv and Ruobing Xie and Shuaipeng Li and Jiayi Liao and Xingwu Sun and Zhanhui Kang and Di Wang and Rui Yan},
     year          = {2025},
-    url       = {https://openreview.net/forum?id=ezRrwwbxd0},
-    note      = {under review}
+    eprint        = {2411.07176},
+    archivePrefix = {arXiv},
+    primaryClass  = {cs.CL},
+    url           = {https://arxiv.org/abs/2411.07176},
 }
 ```
 
x_transformers-2.10.0.dist-info/RECORD → x_transformers-2.10.1.dist-info/RECORD RENAMED
@@ -1,5 +1,5 @@
 x_transformers/__init__.py,sha256=aVuhUU0572TJHW88BVc4yA2tla0Zb8l3NH7W4RZ1AEs,1005
-x_transformers/attend.py,sha256=l968RkOaypWMb_Ba-n82zKms4b62Ng337wtigvPAums,19236
+x_transformers/attend.py,sha256=uu4lIEfiwzZLeuBY2dJLG9709DZbWK8-on4ds8SCCJ0,19207
 x_transformers/autoregressive_wrapper.py,sha256=BsGO9xfVYkvynqbU1__tu_S_cxl7gss0YwnkhIa2baY,18401
 x_transformers/belief_state_wrapper.py,sha256=YLUMk6t2MhFBEw5lHDDHJHcoCxTIkHvxTNY__GGZEKU,13374
 x_transformers/continuous.py,sha256=WwpQCjyVY4PtuEAOFY68zqgklbF9I7AL5w6874YlDe8,13249
@@ -13,7 +13,7 @@ x_transformers/up_wrapper.py,sha256=YC2LN14_7Xx9Wtiek2rtEJ_qHqdfSmKlh3d7Cgxwd80,
 x_transformers/x_transformers.py,sha256=ADr83Fz2cehj_F7N1bMwxhAg-r48fGhlaZqw3hxoxMQ,125765
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=AwwYUm8yDAtKQyKJDIhYMsiLTJ_skh3scUFMjp5sda8,8597
-x_transformers-2.10.0.dist-info/METADATA,sha256=1tiahG4NWO99cWEZ_qRgdgKHSWRIUKdf0xl2j0BfIXQ,95736
-x_transformers-2.10.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-x_transformers-2.10.0.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
-x_transformers-2.10.0.dist-info/RECORD,,
+x_transformers-2.10.1.dist-info/METADATA,sha256=sEfcxJr3l0W4Yga0NLHq1sMk90Zr5-Lpr-9fIlmG9H4,95799
+x_transformers-2.10.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+x_transformers-2.10.1.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-2.10.1.dist-info/RECORD,,