x-transformers 2.10.0__py3-none-any.whl → 2.10.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- x_transformers/attend.py +6 -3
- {x_transformers-2.10.0.dist-info → x_transformers-2.10.2.dist-info}/METADATA +8 -7
- {x_transformers-2.10.0.dist-info → x_transformers-2.10.2.dist-info}/RECORD +5 -5
- {x_transformers-2.10.0.dist-info → x_transformers-2.10.2.dist-info}/WHEEL +0 -0
- {x_transformers-2.10.0.dist-info → x_transformers-2.10.2.dist-info}/licenses/LICENSE +0 -0
x_transformers/attend.py CHANGED
@@ -520,6 +520,7 @@ class Attend(Module):
 
         if self.cog_signed:
             sim_sign = sim.sign()
+            sim = sim.abs()
 
         # masking
 
@@ -548,13 +549,15 @@ class Attend(Module):
         if self.head_learned_sink:
             # add learned attention sink
             attn_sink = repeat(self.head_attn_sink, 'h -> b h i 1', b = sim.shape[0], i = sim.shape[2])
+
+            if self.cog_signed:
+                attn_sink, attn_sink_sign = attn_sink.abs(), attn_sink.sign()
+                sim_sign = cat((attn_sink_sign, sim_sign), dim = -1)
+
             sim = cat((attn_sink, sim), dim = -1)
 
         pre_softmax_attn = sim
 
-        if self.cog_signed:
-            sim = sim.abs()
-
         attn = self.attn_fn(sim)
 
         attn = attn.type(dtype)
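Read together, the two hunks above move the sign/magnitude split of the pre-softmax scores ahead of the learned attention sink. In 2.10.0, `sim_sign` was captured before the sink column was concatenated, but `sim = sim.abs()` ran after it, so the sign tensor ended up one column short of the widened score matrix. Below is a minimal sketch of the corrected flow; `signed_attention_weights` is an illustrative name rather than a library function, and the real forward pass also applies masking, the configurable `attn_fn`, and dtype handling between these steps (the reapplication of `sim_sign` after the softmax happens outside the hunks shown here).

```python
import torch
import torch.nn.functional as F
from torch import cat

def signed_attention_weights(sim, attn_sink = None):
    # factor pre-softmax scores into sign and magnitude up front,
    # mirroring the first hunk
    sim_sign = sim.sign()
    sim = sim.abs()

    if attn_sink is not None:
        # the sink column must be factored the same way *before* it is
        # concatenated, so sim_sign stays aligned column-for-column with
        # sim, mirroring the second hunk
        attn_sink, attn_sink_sign = attn_sink.abs(), attn_sink.sign()
        sim_sign = cat((attn_sink_sign, sim_sign), dim = -1)
        sim = cat((attn_sink, sim), dim = -1)

    # softmax over magnitudes, signs restored afterwards, which is what
    # allows attention weights to be negative
    attn = F.softmax(sim, dim = -1)
    return attn * sim_sign

# toy usage: batch of 2, 8 heads, 16 queries/keys, one sink column per head
sim = torch.randn(2, 8, 16, 16)
sink = torch.randn(2, 8, 16, 1)
attn = signed_attention_weights(sim, sink)   # shape (2, 8, 16, 17)
```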
{x_transformers-2.10.0.dist-info → x_transformers-2.10.2.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: x-transformers
-Version: 2.10.0
+Version: 2.10.2
 Summary: X-Transformers
 Project-URL: Homepage, https://pypi.org/project/x-transformers/
 Project-URL: Repository, https://github.com/lucidrains/x-transformers
@@ -2587,13 +2587,14 @@ ids_out, num_out, is_number_mask = model.generate(start_ids, start_nums, 17)
 ```
 
 ```bibtex
-@
-    title = {More Expressive Attention with Negative Weights},
-    author = {
-    booktitle = {Submitted to The Fourteenth International Conference on Learning Representations},
+@misc{lv2025expressiveattentionnegativeweights,
+    title = {More Expressive Attention with Negative Weights},
+    author = {Ang Lv and Ruobing Xie and Shuaipeng Li and Jiayi Liao and Xingwu Sun and Zhanhui Kang and Di Wang and Rui Yan},
     year = {2025},
-
-
+    eprint = {2411.07176},
+    archivePrefix = {arXiv},
+    primaryClass = {cs.CL},
+    url = {https://arxiv.org/abs/2411.07176},
 }
 ```
 
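For context on the citation swap above: the entry now points at the published arXiv record of the paper behind the `cog_signed` option. Its core idea, which the attend.py hunks implement, is to let attention weights be negative by routing only magnitudes through the softmax; summarized in our own notation (a paraphrase, not the paper's exact formulation):

```latex
% signed attention: softmax over magnitudes, signs restored elementwise
\mathrm{Attn}(S) = \operatorname{sign}(S) \odot \operatorname{softmax}(\lvert S \rvert)
```

where S is the matrix of pre-softmax similarity scores and \odot denotes the elementwise product.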
{x_transformers-2.10.0.dist-info → x_transformers-2.10.2.dist-info}/RECORD CHANGED
@@ -1,5 +1,5 @@
 x_transformers/__init__.py,sha256=aVuhUU0572TJHW88BVc4yA2tla0Zb8l3NH7W4RZ1AEs,1005
-x_transformers/attend.py,sha256=
+x_transformers/attend.py,sha256=vrFPCfr3WwsyMZJxn1Pox_8VHZVLVSMuXThW3eZmd5Q,19388
 x_transformers/autoregressive_wrapper.py,sha256=BsGO9xfVYkvynqbU1__tu_S_cxl7gss0YwnkhIa2baY,18401
 x_transformers/belief_state_wrapper.py,sha256=YLUMk6t2MhFBEw5lHDDHJHcoCxTIkHvxTNY__GGZEKU,13374
 x_transformers/continuous.py,sha256=WwpQCjyVY4PtuEAOFY68zqgklbF9I7AL5w6874YlDe8,13249
@@ -13,7 +13,7 @@ x_transformers/up_wrapper.py,sha256=YC2LN14_7Xx9Wtiek2rtEJ_qHqdfSmKlh3d7Cgxwd80,
 x_transformers/x_transformers.py,sha256=ADr83Fz2cehj_F7N1bMwxhAg-r48fGhlaZqw3hxoxMQ,125765
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=AwwYUm8yDAtKQyKJDIhYMsiLTJ_skh3scUFMjp5sda8,8597
-x_transformers-2.10.
-x_transformers-2.10.
-x_transformers-2.10.
-x_transformers-2.10.
+x_transformers-2.10.2.dist-info/METADATA,sha256=VS0evtudpPIsBv4vWQFSn34IHkOEXoyJ7g797HqJd_A,95799
+x_transformers-2.10.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+x_transformers-2.10.2.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-2.10.2.dist-info/RECORD,,
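The RECORD entries above pair each file with an unpadded urlsafe-base64 sha256 digest and a byte size, per the wheel spec. A minimal sketch for checking one of them locally, assuming the 2.10.2 wheel has been unpacked into the working directory:

```python
import base64
import hashlib
from pathlib import Path

def record_hash(path):
    # RECORD stores base64.urlsafe_b64encode(sha256 digest) with '=' padding stripped
    digest = hashlib.sha256(Path(path).read_bytes()).digest()
    return base64.urlsafe_b64encode(digest).rstrip(b'=').decode()

# compare against the RECORD line for the changed module
assert record_hash('x_transformers/attend.py') == 'vrFPCfr3WwsyMZJxn1Pox_8VHZVLVSMuXThW3eZmd5Q'
```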
{x_transformers-2.10.0.dist-info → x_transformers-2.10.2.dist-info}/WHEEL: file without changes
{x_transformers-2.10.0.dist-info → x_transformers-2.10.2.dist-info}/licenses/LICENSE: file without changes