x-transformers 2.9.1__py3-none-any.whl → 2.9.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- x_transformers/attend.py +9 -0
- {x_transformers-2.9.1.dist-info → x_transformers-2.9.2.dist-info}/METADATA +1 -1
- {x_transformers-2.9.1.dist-info → x_transformers-2.9.2.dist-info}/RECORD +5 -5
- {x_transformers-2.9.1.dist-info → x_transformers-2.9.2.dist-info}/WHEEL +0 -0
- {x_transformers-2.9.1.dist-info → x_transformers-2.9.2.dist-info}/licenses/LICENSE +0 -0
x_transformers/attend.py
CHANGED
@@ -67,6 +67,15 @@ def once(fn):
 
 print_once = once(print)
 
+# gumbel softmax attention related
+
+def log_prob_from_hard_attend(intermeds: Intermediates):
+    log_probs = intermeds.pre_softmax_attn.log_softmax(dim = -1)
+
+    one_hot = intermeds.post_softmax_attn.argmax(dim = -1, keepdim = True)
+    log_prob = log_probs.gather(-1, one_hot)
+    return rearrange(log_prob, 'b h i 1 -> b h i')
+
 # selective attention
 # https://arxiv.org/abs/2410.02703 - section 3.3
 # it is a technique to allow each token to prevent itself from being attended to by future tokens
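For context on what the new helper computes: given the saved pre- and post-softmax attention maps, it gathers the log-probability (under the soft distribution) of the hard argmax attention choice per query position, the quantity typically needed when training gumbel / hard attention. A minimal usage sketch, assuming Intermediates remains a keyword-constructible dataclass in x_transformers/attend.py whose pre_softmax_attn / post_softmax_attn fields hold (batch, heads, queries, keys) tensors, as the 'b h i 1 -> b h i' rearrange suggests:

import torch
from x_transformers.attend import Intermediates, log_prob_from_hard_attend

# assumed shapes: (batch, heads, query_len, key_len)
logits = torch.randn(2, 8, 16, 16)

intermeds = Intermediates(
    pre_softmax_attn = logits,                    # raw attention logits
    post_softmax_attn = logits.softmax(dim = -1)  # attention weights
)

log_prob = log_prob_from_hard_attend(intermeds)
print(log_prob.shape)  # torch.Size([2, 8, 16]) - one log-prob per query position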
{x_transformers-2.9.1.dist-info → x_transformers-2.9.2.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 x_transformers/__init__.py,sha256=aVuhUU0572TJHW88BVc4yA2tla0Zb8l3NH7W4RZ1AEs,1005
-x_transformers/attend.py,sha256=
+x_transformers/attend.py,sha256=RZJT9pPlpqSG3nOUqQHNRR6jOeJ2r-Fvvar2wdu9HLw,18687
 x_transformers/autoregressive_wrapper.py,sha256=BsGO9xfVYkvynqbU1__tu_S_cxl7gss0YwnkhIa2baY,18401
 x_transformers/belief_state_wrapper.py,sha256=YLUMk6t2MhFBEw5lHDDHJHcoCxTIkHvxTNY__GGZEKU,13374
 x_transformers/continuous.py,sha256=WwpQCjyVY4PtuEAOFY68zqgklbF9I7AL5w6874YlDe8,13249
@@ -13,7 +13,7 @@ x_transformers/up_wrapper.py,sha256=YC2LN14_7Xx9Wtiek2rtEJ_qHqdfSmKlh3d7Cgxwd80,
 x_transformers/x_transformers.py,sha256=o6B10urcC7MRUrmoHOgYJgkrVDzHhX-jt6zZY3pZEgA,125700
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=AwwYUm8yDAtKQyKJDIhYMsiLTJ_skh3scUFMjp5sda8,8597
-x_transformers-2.9.
-x_transformers-2.9.
-x_transformers-2.9.
-x_transformers-2.9.
+x_transformers-2.9.2.dist-info/METADATA,sha256=3JsbSIp9fsGpuXopeIaIq4ffjYTJIHyqdRLxM21cfUM,95381
+x_transformers-2.9.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+x_transformers-2.9.2.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-2.9.2.dist-info/RECORD,,
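The sha256= values above follow the standard wheel RECORD convention: urlsafe base64 of the file's SHA-256 digest with the trailing '=' padding stripped, followed by the file size in bytes. A small sketch for reproducing an entry locally (the path assumes an unpacked 2.9.2 wheel):

import base64, hashlib, os

def record_entry(path):
    # wheel RECORD format: path,sha256=<urlsafe-b64 digest, unpadded>,<size>
    with open(path, 'rb') as f:
        digest = hashlib.sha256(f.read()).digest()
    b64 = base64.urlsafe_b64encode(digest).rstrip(b'=').decode()
    return f'{path},sha256={b64},{os.path.getsize(path)}'

# should print the attend.py line from the RECORD diff above
print(record_entry('x_transformers/attend.py'))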
{x_transformers-2.9.1.dist-info → x_transformers-2.9.2.dist-info}/WHEEL
File without changes

{x_transformers-2.9.1.dist-info → x_transformers-2.9.2.dist-info}/licenses/LICENSE
File without changes