x-transformers 1.42.0.tar.gz → 1.42.2.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (22)
  1. {x_transformers-1.42.0/x_transformers.egg-info → x_transformers-1.42.2}/PKG-INFO +1 -1
  2. {x_transformers-1.42.0 → x_transformers-1.42.2}/setup.py +1 -1
  3. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers/neo_mlp.py +9 -0
  4. {x_transformers-1.42.0 → x_transformers-1.42.2/x_transformers.egg-info}/PKG-INFO +1 -1
  5. {x_transformers-1.42.0 → x_transformers-1.42.2}/LICENSE +0 -0
  6. {x_transformers-1.42.0 → x_transformers-1.42.2}/README.md +0 -0
  7. {x_transformers-1.42.0 → x_transformers-1.42.2}/setup.cfg +0 -0
  8. {x_transformers-1.42.0 → x_transformers-1.42.2}/tests/test_x_transformers.py +0 -0
  9. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers/__init__.py +0 -0
  10. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers/attend.py +0 -0
  11. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers/autoregressive_wrapper.py +0 -0
  12. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers/continuous.py +0 -0
  13. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers/dpo.py +0 -0
  14. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers/multi_input.py +0 -0
  15. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers/nonautoregressive_wrapper.py +0 -0
  16. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers/x_transformers.py +0 -0
  17. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers/xl_autoregressive_wrapper.py +0 -0
  18. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers/xval.py +0 -0
  19. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers.egg-info/SOURCES.txt +0 -0
  20. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers.egg-info/dependency_links.txt +0 -0
  21. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers.egg-info/requires.txt +0 -0
  22. {x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers.egg-info/top_level.txt +0 -0

{x_transformers-1.42.0/x_transformers.egg-info → x_transformers-1.42.2}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.42.0
+Version: 1.42.2
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang

{x_transformers-1.42.0 → x_transformers-1.42.2}/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '1.42.0',
+  version = '1.42.2',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',

{x_transformers-1.42.0 → x_transformers-1.42.2}/x_transformers/neo_mlp.py
@@ -41,6 +41,7 @@ class RandomFourierEmbed(Module):
 
 class NeoMLP(Module):
     """ https://openreview.net/forum?id=A8Vuf2e8y6 """
+    """ https://haian-jin.github.io/projects/LVSM/ """
 
     def __init__(
         self,

@@ -93,6 +94,11 @@ class NeoMLP(Module):
         x,
         return_embeds = False
     ):
+        no_batch = x.ndim == 1
+
+        if no_batch:
+            x = rearrange(x, '... -> 1 ...')
+
         batch = x.shape[0]
 
         fouriered_input = self.random_fourier(x)

@@ -120,6 +126,9 @@ class NeoMLP(Module):
         output = einsum(output_embed, self.to_output_weights, 'b n d, n d -> b n')
         output = output + self.to_output_bias
 
+        if no_batch:
+            output = rearrange(output, '1 ... -> ...')
+
         if not return_embeds:
             return output
 
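
The two neo_mlp.py hunks above let NeoMLP.forward accept a single unbatched feature vector: a 1-D input temporarily gains a batch dimension of 1, and the output has it stripped again before being returned. The sketch below reproduces that add-then-strip pattern in isolation; the helper name forward_with_optional_batch and the stand-in nn.Linear module are illustrative assumptions, not part of the package.

import torch
from einops import rearrange

def forward_with_optional_batch(fn, x):
    # Same convenience the patch adds to NeoMLP.forward: if the input has no
    # batch dimension, add a singleton one so downstream code can keep assuming
    # x.shape[0] is the batch size, then strip it from the output again.
    no_batch = x.ndim == 1

    if no_batch:
        x = rearrange(x, '... -> 1 ...')      # (d,) -> (1, d)

    out = fn(x)                               # any module that expects batched input

    if no_batch:
        out = rearrange(out, '1 ... -> ...')  # (1, n) -> (n,)

    return out

linear = torch.nn.Linear(4, 3)
forward_with_optional_batch(linear, torch.randn(4)).shape     # torch.Size([3])
forward_with_optional_batch(linear, torch.randn(2, 4)).shape  # torch.Size([2, 3])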

{x_transformers-1.42.0 → x_transformers-1.42.2/x_transformers.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.42.0
+Version: 1.42.2
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang