x-transformers 1.27.20__tar.gz → 1.27.21__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (19):
  1. {x_transformers-1.27.20/x_transformers.egg-info → x_transformers-1.27.21}/PKG-INFO +1 -1
  2. {x_transformers-1.27.20 → x_transformers-1.27.21}/README.md +1 -2
  3. {x_transformers-1.27.20 → x_transformers-1.27.21}/setup.py +1 -1
  4. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers/xval.py +9 -0
  5. {x_transformers-1.27.20 → x_transformers-1.27.21/x_transformers.egg-info}/PKG-INFO +1 -1
  6. {x_transformers-1.27.20 → x_transformers-1.27.21}/LICENSE +0 -0
  7. {x_transformers-1.27.20 → x_transformers-1.27.21}/setup.cfg +0 -0
  8. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers/__init__.py +0 -0
  9. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers/attend.py +0 -0
  10. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers/autoregressive_wrapper.py +0 -0
  11. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers/continuous.py +0 -0
  12. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers/dpo.py +0 -0
  13. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers/nonautoregressive_wrapper.py +0 -0
  14. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers/x_transformers.py +0 -0
  15. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers/xl_autoregressive_wrapper.py +0 -0
  16. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers.egg-info/SOURCES.txt +0 -0
  17. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers.egg-info/dependency_links.txt +0 -0
  18. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers.egg-info/requires.txt +0 -0
  19. {x_transformers-1.27.20 → x_transformers-1.27.21}/x_transformers.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: x-transformers
3
- Version: 1.27.20
3
+ Version: 1.27.21
4
4
  Summary: X-Transformers - Pytorch
5
5
  Home-page: https://github.com/lucidrains/x-transformers
6
6
  Author: Phil Wang
@@ -1437,11 +1437,10 @@ model = XValAutoregressiveWrapper(model)
1437
1437
 
1438
1438
  ids = torch.randint(0, 4, (1, 777))
1439
1439
  nums = torch.randn(1, 777)
1440
- mask = torch.ones(1, 777).bool()
1441
1440
 
1442
1441
  # train on a lot of data above
1443
1442
 
1444
- loss = model(ids, nums, mask = mask)
1443
+ loss = model(ids, nums)
1445
1444
  loss.backward()
1446
1445
 
1447
1446
  # then generate
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
3
3
  setup(
4
4
  name = 'x-transformers',
5
5
  packages = find_packages(exclude=['examples']),
6
- version = '1.27.20',
6
+ version = '1.27.21',
7
7
  license='MIT',
8
8
  description = 'X-Transformers - Pytorch',
9
9
  author = 'Phil Wang',
@@ -271,8 +271,17 @@ class XValAutoregressiveWrapper(nn.Module):
271
271
 
272
272
  cross_entropy_loss = F.cross_entropy(logits, target, reduction = 'none', ignore_index = self.ignore_index)
273
273
 
274
+ # protect against nan in `x_num` input tensor
275
+
276
+ target_is_number_mask = target == self.net.numerical_token_id
277
+ x_num_target = x_num_target.masked_fill(~target_is_number_mask, 0.)
278
+
279
+ # ignore index
280
+
274
281
  target_mask = target != self.ignore_index
275
282
 
283
+ # numerical mse loss
284
+
276
285
  numerical_mse_loss = F.mse_loss(numerical_pred, x_num_target, reduction = 'none')
277
286
 
278
287
  numerical_mse_loss = numerical_mse_loss * target_mask
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: x-transformers
3
- Version: 1.27.20
3
+ Version: 1.27.21
4
4
  Summary: X-Transformers - Pytorch
5
5
  Home-page: https://github.com/lucidrains/x-transformers
6
6
  Author: Phil Wang