x-transformers 1.42.17__py3-none-any.whl → 1.42.18__py3-none-any.whl
- x_transformers/x_transformers.py +8 -1
- {x_transformers-1.42.17.dist-info → x_transformers-1.42.18.dist-info}/METADATA +3 -2
- {x_transformers-1.42.17.dist-info → x_transformers-1.42.18.dist-info}/RECORD +6 -6
- {x_transformers-1.42.17.dist-info → x_transformers-1.42.18.dist-info}/LICENSE +0 -0
- {x_transformers-1.42.17.dist-info → x_transformers-1.42.18.dist-info}/WHEEL +0 -0
- {x_transformers-1.42.17.dist-info → x_transformers-1.42.18.dist-info}/top_level.txt +0 -0
x_transformers/x_transformers.py
CHANGED
@@ -20,6 +20,8 @@ import einx
 from einops.layers.torch import Rearrange
 from einops import rearrange, repeat, reduce, pack, unpack

+from loguru import logger
+
 from x_transformers.attend import Attend, Intermediates
 from x_transformers.autoregressive_wrapper import AutoregressiveWrapper

@@ -1580,7 +1582,12 @@ class AttentionLayers(Module):

         self.disable_abs_pos_emb = default(disable_abs_pos_emb, (rel_pos_bias or rotary_pos_emb))

-        rotary_emb_dim =
+        rotary_emb_dim = default(rotary_emb_dim, dim_head // 2)
+
+        assert rotary_emb_dim <= dim_head, f'rotary emb dim {rotary_emb_dim} must be less than or equal to attention head dimension {dim_head}'
+
+        if rotary_emb_dim < 32:
+            logger.warning('when training language model, rotary embedding dimension should be at least 32')

         assert not (rotary_xpos and not causal), 'rotary xpos is not compatible with bidirectional attention'
         self.rotary_pos_emb = RotaryEmbedding(rotary_emb_dim, use_xpos = rotary_xpos, scale_base = rotary_xpos_scale_base, interpolation_factor = rotary_interpolation_factor, base_rescale_factor = rotary_base_rescale_factor) if rotary_pos_emb else None
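In practice, this hunk means rotary_emb_dim now defaults to half the attention head dimension, must not exceed dim_head, and only logs a warning when it falls below 32. A minimal sketch of how that surfaces through the usual TransformerWrapper/Decoder entry points; the values are illustrative, and the attn_dim_head kwarg is assumed to be forwarded to AttentionLayers as dim_head, as in prior releases:

# Illustrative configuration only; numbers below are examples, not library defaults.
from x_transformers import TransformerWrapper, Decoder

model = TransformerWrapper(
    num_tokens = 20000,
    max_seq_len = 1024,
    attn_layers = Decoder(
        dim = 512,
        depth = 6,
        heads = 8,
        attn_dim_head = 64,     # assumed to reach AttentionLayers as dim_head
        rotary_pos_emb = True,  # enables RotaryEmbedding
        rotary_emb_dim = 32     # explicit; omitting it now yields dim_head // 2 = 32
    )
)

# rotary_emb_dim = 16 here would still build the model but log the new loguru
# warning, and rotary_emb_dim = 128 would trip the new assert against dim_head.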
{x_transformers-1.42.17.dist-info → x_transformers-1.42.18.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.42.17
+Version: 1.42.18
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
@@ -14,7 +14,8 @@ Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3.6
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: torch>=2.0
 Requires-Dist: einx>=0.3.0
 Requires-Dist: einops>=0.8.0
+Requires-Dist: loguru
 Requires-Dist: packaging>=21.0
+Requires-Dist: torch>=2.0
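Since loguru is now a runtime dependency and the new dimension check logs through it, downstream code that wants to suppress that warning can use loguru's standard per-package switch; a minimal sketch:

from loguru import logger

# Disable all loguru messages emitted from inside the x_transformers package,
# including the new rotary-embedding-dimension warning.
logger.disable("x_transformers")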
{x_transformers-1.42.17.dist-info → x_transformers-1.42.18.dist-info}/RECORD
CHANGED
@@ -6,11 +6,11 @@ x_transformers/dpo.py,sha256=xt4OuOWhU8pN3OKN2LZAaC2NC8iiEnchqqcrPWVqf0o,3521
 x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg,9252
 x_transformers/neo_mlp.py,sha256=XCNnnop9WLarcxap1kGuYc1x8GHvwkZiDRnXOxSl3Po,3452
 x_transformers/nonautoregressive_wrapper.py,sha256=2NU58hYMgn-4Jzg3mie-mXb0XH_dCN7fjlzd3K1rLUY,10510
-x_transformers/x_transformers.py,sha256=
+x_transformers/x_transformers.py,sha256=pFVTmAoAbrir7YjTwzC3X2buRSm7PFnWqYyTYePA8Es,95486
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=7S00kCuab4tWQa-vf-z-XfzADjVj48MoFIr7VSIvttg,8575
-x_transformers-1.42.
-x_transformers-1.42.
-x_transformers-1.42.
-x_transformers-1.42.
-x_transformers-1.42.
+x_transformers-1.42.18.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.42.18.dist-info/METADATA,sha256=v9YlgCULHqvWhTC3bViadNngzfiyYkzrQa6XRZ0uDa4,739
+x_transformers-1.42.18.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+x_transformers-1.42.18.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.42.18.dist-info/RECORD,,
{x_transformers-1.42.17.dist-info → x_transformers-1.42.18.dist-info}/LICENSE
File without changes

{x_transformers-1.42.17.dist-info → x_transformers-1.42.18.dist-info}/WHEEL
File without changes

{x_transformers-1.42.17.dist-info → x_transformers-1.42.18.dist-info}/top_level.txt
File without changes