x-transformers 1.38.1__py3-none-any.whl → 1.38.2__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- x_transformers/attend.py +3 -0
- {x_transformers-1.38.1.dist-info → x_transformers-1.38.2.dist-info}/METADATA +1 -1
- {x_transformers-1.38.1.dist-info → x_transformers-1.38.2.dist-info}/RECORD +6 -6
- {x_transformers-1.38.1.dist-info → x_transformers-1.38.2.dist-info}/LICENSE +0 -0
- {x_transformers-1.38.1.dist-info → x_transformers-1.38.2.dist-info}/WHEEL +0 -0
- {x_transformers-1.38.1.dist-info → x_transformers-1.38.2.dist-info}/top_level.txt +0 -0
x_transformers/attend.py
CHANGED
@@ -185,6 +185,9 @@ class Attend(Module):
         self.pre_softmax_talking_heads = nn.Conv2d(heads, heads, 1, bias = False)
         self.post_softmax_talking_heads = nn.Conv2d(heads, heads, 1, bias = False)

+        nn.init.dirac_(self.pre_softmax_talking_heads.weight)
+        nn.init.dirac_(self.post_softmax_talking_heads.weight)
+
         # selective attention

         assert not (flash and selective), 'selective attention cannot work on flash attention'
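The only source change in this release: the two talking-heads projections, the 1x1 convolutions that mix attention scores across heads before and after the softmax, are now initialized to the identity. A minimal runnable sketch of what nn.init.dirac_ does to such a convolution (assumptions: PyTorch is available, and heads = 8 plus the tensor shapes are chosen here for illustration, not taken from the package):

# Sketch only, not code from the package. nn.init.dirac_ fills a
# Conv2d(heads, heads, 1) weight with a Dirac delta (weight[i, i, 0, 0] = 1),
# which makes the bias-free 1x1 convolution an exact identity map over the
# head dimension at initialization.
import torch
import torch.nn as nn

heads = 8
talking_heads = nn.Conv2d(heads, heads, 1, bias = False)
nn.init.dirac_(talking_heads.weight)

# attention scores shaped (batch, heads, query_len, key_len)
scores = torch.randn(2, heads, 16, 16)
mixed = talking_heads(scores)

print(torch.allclose(mixed, scores))  # True: identity at initialization

Starting at the identity means the talking-heads mixing begins as a no-op and only drifts away from it as training updates the weights, which is presumably the motivation for the change.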
{x_transformers-1.38.1.dist-info → x_transformers-1.38.2.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 x_transformers/__init__.py,sha256=-MkQrSc37cTVDX7AOykxunYnqVtFlQ7lb0Cse5dsGWU,793
-x_transformers/attend.py,sha256=
+x_transformers/attend.py,sha256=HO6HZ1fowJ6a6v915PH4s8PnfNj0_q47Sq7yc9AP5YQ,15380
 x_transformers/autoregressive_wrapper.py,sha256=DOJJCMMDOqDYKWy_IaG5IyKsXD3AW6amzfUgdAADOLY,10500
 x_transformers/continuous.py,sha256=cIVEdhfei258__ziV7kQBrJMxCel54bExBTDrO9rfCI,6450
 x_transformers/dpo.py,sha256=LjvWgCkqTl-UuehrzQ8nkX5guLr4whYwsmm7SKSwdls,3450
@@ -8,8 +8,8 @@ x_transformers/nonautoregressive_wrapper.py,sha256=2NU58hYMgn-4Jzg3mie-mXb0XH_dC
 x_transformers/x_transformers.py,sha256=Dol6GMZOoHGOFdHwe21o2SbJp6b3YKCUHoIs_AjfvTo,83963
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=QE1ltYZTR_eGgIHPP2BrMWVWVLqMW-OpDZh87BSmQEg,8563
-x_transformers-1.38.
-x_transformers-1.38.
-x_transformers-1.38.
-x_transformers-1.38.
-x_transformers-1.38.
+x_transformers-1.38.2.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.38.2.dist-info/METADATA,sha256=pThqFTEo8bihgUlSYdv3r-JFB153pbO2baJgXwMMZZs,661
+x_transformers-1.38.2.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+x_transformers-1.38.2.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.38.2.dist-info/RECORD,,
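For reading the hashes above: each RECORD row has the form path,hash,size, where the hash field (per PEP 376 and PEP 427) is the hashlib algorithm name, an equals sign, and the urlsafe-base64 digest with trailing "=" padding stripped; RECORD itself carries no hash or size, hence its trailing ",,". A small sketch of how such a row is computed (illustrative only; the placeholder bytes will not reproduce the digests shown above):

# Sketch of the wheel RECORD entry format (PEP 376 / PEP 427), not code from
# the package: sha256 the file bytes, urlsafe-base64 encode the digest,
# strip the "=" padding, and append the size in bytes.
import base64
import hashlib

def record_entry(path: str, data: bytes) -> str:
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest())
    return f"{path},sha256={digest.rstrip(b'=').decode('ascii')},{len(data)}"

print(record_entry("x_transformers/attend.py", b"placeholder bytes"))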
{x_transformers-1.38.1.dist-info → x_transformers-1.38.2.dist-info}/LICENSE
File without changes

{x_transformers-1.38.1.dist-info → x_transformers-1.38.2.dist-info}/WHEEL
File without changes

{x_transformers-1.38.1.dist-info → x_transformers-1.38.2.dist-info}/top_level.txt
File without changes