x-transformers 1.42.10__tar.gz → 1.42.12__tar.gz
- {x_transformers-1.42.10/x_transformers.egg-info → x_transformers-1.42.12}/PKG-INFO +2 -1
- {x_transformers-1.42.10 → x_transformers-1.42.12}/setup.py +3 -2
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers/continuous.py +20 -3
- {x_transformers-1.42.10 → x_transformers-1.42.12/x_transformers.egg-info}/PKG-INFO +2 -1
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers.egg-info/requires.txt +1 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/LICENSE +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/README.md +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/setup.cfg +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/tests/test_x_transformers.py +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers/__init__.py +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers/attend.py +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers/autoregressive_wrapper.py +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers/dpo.py +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers/multi_input.py +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers/neo_mlp.py +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers/nonautoregressive_wrapper.py +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers/x_transformers.py +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers/xl_autoregressive_wrapper.py +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers/xval.py +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers.egg-info/SOURCES.txt +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers.egg-info/dependency_links.txt +0 -0
- {x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers.egg-info/top_level.txt +0 -0
{x_transformers-1.42.10/x_transformers.egg-info → x_transformers-1.42.12}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.42.10
+Version: 1.42.12
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
@@ -17,3 +17,4 @@ License-File: LICENSE
 Requires-Dist: torch>=2.0
 Requires-Dist: einx>=0.3.0
 Requires-Dist: einops>=0.8.0
+Requires-Dist: packaging>=21.0
{x_transformers-1.42.10 → x_transformers-1.42.12}/setup.py

@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '1.42.10',
+  version = '1.42.12',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
@@ -18,7 +18,8 @@ setup(
   install_requires=[
     'torch>=2.0',
     'einx>=0.3.0',
-    'einops>=0.8.0'
+    'einops>=0.8.0',
+    'packaging>=21.0',
   ],
   setup_requires=[
     'pytest-runner',
{x_transformers-1.42.10 → x_transformers-1.42.12}/x_transformers/continuous.py

@@ -2,7 +2,8 @@ import torch
 from torch import nn
 import torch.nn.functional as F
 
-
+import einx
+from einops import reduce, pack, repeat, unpack
 
 from x_transformers.x_transformers import (
     AttentionLayers,
@@ -24,6 +25,15 @@ def default(val, d):
         return val
     return d() if callable(d) else d
 
+def masked_mean(t, mask):
+    t = einx.where('b n, b n d, -> b n d', mask, t, 0.)
+
+    num = reduce(t, 'b n d -> b', 'sum')
+    den = mask.sum(dim = -1)
+
+    masked_average = num / den.clamp(min = 1.)
+    return masked_average
+
 # main classes
 
 class ContinuousTransformerWrapper(nn.Module):
@@ -169,12 +179,15 @@ class ContinuousAutoregressiveWrapper(nn.Module):
         net: ContinuousTransformerWrapper,
         ignore_index = -100,
         pad_value = 0,
-        loss_fn = nn.MSELoss(reduction = 'none')
+        loss_fn = nn.MSELoss(reduction = 'none'),
+        equal_loss_weight_batch = False  # if True and a mask is passed in with variable-length sequences, each sequence is weighted equally (as opposed to each token)
     ):
         super().__init__()
         self.net = net
         self.max_seq_len = net.max_seq_len
+
         self.loss_fn = loss_fn
+        self.equal_loss_weight_batch = equal_loss_weight_batch
 
     @torch.no_grad()
     def generate(self, start_tokens, seq_len, **kwargs):
@@ -222,6 +235,10 @@ class ContinuousAutoregressiveWrapper(nn.Module):
 
         if exists(mask):
             assert loss.ndim > 1, 'loss should not be reduced if mask is passed in'
-
+
+            if self.equal_loss_weight_batch:
+                loss = masked_mean(loss, mask)
+            else:
+                loss = loss[mask]
 
         return loss.mean()
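The continuous.py change is the substantive one in this release: ContinuousAutoregressiveWrapper gains an equal_loss_weight_batch flag. By default the masked, unreduced loss is flattened with loss[mask] before .mean(), so every valid token counts equally and longer sequences dominate the batch average; with the flag enabled, the new masked_mean helper averages within each sequence first, so every sequence contributes equally. A minimal plain-torch sketch of the two reductions (the toy tensors and the masked_fill re-implementation below are illustrative, not code from the library):

import torch

# unreduced loss for 2 sequences of 4 timesteps with a single feature dim (d = 1):
# sequence 0 has 4 valid tokens with loss 1.0, sequence 1 has 2 valid tokens with loss 5.0
loss = torch.tensor([[1., 1., 1., 1.],
                     [5., 5., 0., 0.]]).unsqueeze(-1)        # shape (b = 2, n = 4, d = 1)
mask = torch.tensor([[True, True, True, True],
                     [True, True, False, False]])            # shape (b, n)

# default path (equal_loss_weight_batch = False): flatten all valid tokens together,
# so the longer sequence dominates the batch average
token_weighted = loss[mask].mean()                           # (4 * 1 + 2 * 5) / 6 ≈ 2.33

# new path (equal_loss_weight_batch = True): zero out padded positions, average within
# each sequence, then average across the batch (plain-torch equivalent of masked_mean)
zeroed = loss.masked_fill(~mask[..., None], 0.)
per_seq = zeroed.sum(dim = (1, 2)) / mask.sum(dim = -1).clamp(min = 1)   # tensor([1., 5.])
seq_weighted = per_seq.mean()                                # (1 + 5) / 2 = 3.0

Note that masked_mean in the diff divides by the number of valid tokens rather than valid elements, so with more than one feature dimension the sequence-weighted loss also carries a constant factor of d relative to the token-weighted one; with d = 1, as above, the two numbers are directly comparable.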
{x_transformers-1.42.10 → x_transformers-1.42.12/x_transformers.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.42.10
+Version: 1.42.12
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
@@ -17,3 +17,4 @@ License-File: LICENSE
 Requires-Dist: torch>=2.0
 Requires-Dist: einx>=0.3.0
 Requires-Dist: einops>=0.8.0
+Requires-Dist: packaging>=21.0