x-transformers 2.0.0__py3-none-any.whl → 2.0.2__py3-none-any.whl
- x_transformers/x_transformers.py +43 -5
- x_transformers-2.0.2.dist-info/METADATA +2420 -0
- {x_transformers-2.0.0.dist-info → x_transformers-2.0.2.dist-info}/RECORD +5 -6
- {x_transformers-2.0.0.dist-info → x_transformers-2.0.2.dist-info}/WHEEL +1 -2
- x_transformers-2.0.0.dist-info/METADATA +0 -30
- x_transformers-2.0.0.dist-info/top_level.txt +0 -1
- {x_transformers-2.0.0.dist-info → x_transformers-2.0.2.dist-info/licenses}/LICENSE +0 -0
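
The RECORD changes below are hash and size bookkeeping: each entry has the form path,sha256=<urlsafe-base64 digest, no padding>,<size in bytes>, so the edited x_transformers/x_transformers.py gets a new digest, and the 2.0.2 .dist-info entries replace the 2.0.0 ones. As a minimal sketch (not the project's own tooling), these entries can be checked against the files actually shipped in the wheel; the wheel filename below is an assumed local download used for illustration.

```python
# Minimal sketch: verify RECORD entries of a wheel, assuming the standard
# "path,sha256=<urlsafe-b64 digest, no padding>,<size>" entry format.
# The wheel filename is a hypothetical local path, not part of the diff above.
import base64, csv, hashlib, io, zipfile

WHEEL = "x_transformers-2.0.2-py3-none-any.whl"  # assumed local download

with zipfile.ZipFile(WHEEL) as zf:
    record_name = next(n for n in zf.namelist() if n.endswith(".dist-info/RECORD"))
    rows = csv.reader(io.StringIO(zf.read(record_name).decode("utf-8")))
    for row in rows:
        if len(row) != 3 or not row[1]:
            continue  # skip blank rows and the hash-less RECORD entry itself
        path, hash_spec, _size = row
        algo, _, expected = hash_spec.partition("=")
        digest = hashlib.new(algo, zf.read(path)).digest()
        actual = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
        print("ok      " if actual == expected else "MISMATCH", path)
```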
{x_transformers-2.0.0.dist-info → x_transformers-2.0.2.dist-info}/RECORD
@@ -6,11 +6,10 @@ x_transformers/dpo.py,sha256=xt4OuOWhU8pN3OKN2LZAaC2NC8iiEnchqqcrPWVqf0o,3521
 x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg,9252
 x_transformers/neo_mlp.py,sha256=XCNnnop9WLarcxap1kGuYc1x8GHvwkZiDRnXOxSl3Po,3452
 x_transformers/nonautoregressive_wrapper.py,sha256=2NU58hYMgn-4Jzg3mie-mXb0XH_dCN7fjlzd3K1rLUY,10510
-x_transformers/x_transformers.py,sha256=
+x_transformers/x_transformers.py,sha256=1s8KCSfHXMN9TKLFdS-RzzCskBDkh4CuBk2_XRb6IXk,107537
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=7S00kCuab4tWQa-vf-z-XfzADjVj48MoFIr7VSIvttg,8575
-x_transformers-2.0.
-x_transformers-2.0.
-x_transformers-2.0.
-x_transformers-2.0.
-x_transformers-2.0.0.dist-info/RECORD,,
+x_transformers-2.0.2.dist-info/METADATA,sha256=tNdI3H2S4HnnGK1hPY3l94FoXH3SB9vGAb55pcah6Yw,86506
+x_transformers-2.0.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+x_transformers-2.0.2.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-2.0.2.dist-info/RECORD,,
x_transformers-2.0.0.dist-info/METADATA
@@ -1,30 +0,0 @@
-Metadata-Version: 2.2
-Name: x-transformers
-Version: 2.0.0
-Summary: X-Transformers - Pytorch
-Home-page: https://github.com/lucidrains/x-transformers
-Author: Phil Wang
-Author-email: lucidrains@gmail.com
-License: MIT
-Keywords: artificial intelligence,attention mechanism,transformers
-Classifier: Development Status :: 4 - Beta
-Classifier: Intended Audience :: Developers
-Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3.6
-Description-Content-Type: text/markdown
-License-File: LICENSE
-Requires-Dist: einx>=0.3.0
-Requires-Dist: einops>=0.8.0
-Requires-Dist: loguru
-Requires-Dist: packaging>=21.0
-Requires-Dist: torch>=2.0
-Dynamic: author
-Dynamic: author-email
-Dynamic: classifier
-Dynamic: description-content-type
-Dynamic: home-page
-Dynamic: keywords
-Dynamic: license
-Dynamic: requires-dist
-Dynamic: summary
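
The hunk above removes the 2.0.0 core-metadata file; the 2.0.2 wheel ships a much larger METADATA in its place (the +2420 lines in the summary), most likely because the long description is now embedded. For reference, the same fields can be read back from an installed copy of the package via the standard library; a small sketch, assuming x-transformers is installed in the current environment.

```python
# Minimal sketch: read the core-metadata fields shown above for an installed
# copy of the package, using only the standard library.
# Assumes x-transformers has been installed in this environment beforehand.
from importlib.metadata import metadata, requires, version

name = "x-transformers"
meta = metadata(name)               # email.message.Message-like mapping

print(meta["Name"], version(name))  # distribution name and installed version
print(meta["License"])              # "MIT"
print(meta.get_all("Classifier"))   # the Classifier: lines
print(requires(name))               # the Requires-Dist: lines, e.g. "torch>=2.0"
```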
x_transformers-2.0.0.dist-info/top_level.txt
@@ -1 +0,0 @@
-x_transformers

{x_transformers-2.0.0.dist-info → x_transformers-2.0.2.dist-info/licenses}/LICENSE
File without changes