x-transformers 1.29.0__tar.gz → 1.29.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (19)
  1. {x_transformers-1.29.0/x_transformers.egg-info → x_transformers-1.29.2}/PKG-INFO +1 -1
  2. {x_transformers-1.29.0 → x_transformers-1.29.2}/README.md +1 -6
  3. {x_transformers-1.29.0 → x_transformers-1.29.2}/setup.py +1 -1
  4. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers/x_transformers.py +5 -1
  5. {x_transformers-1.29.0 → x_transformers-1.29.2/x_transformers.egg-info}/PKG-INFO +1 -1
  6. {x_transformers-1.29.0 → x_transformers-1.29.2}/LICENSE +0 -0
  7. {x_transformers-1.29.0 → x_transformers-1.29.2}/setup.cfg +0 -0
  8. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers/__init__.py +0 -0
  9. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers/attend.py +0 -0
  10. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers/autoregressive_wrapper.py +0 -0
  11. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers/continuous.py +0 -0
  12. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers/dpo.py +0 -0
  13. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers/nonautoregressive_wrapper.py +0 -0
  14. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers/xl_autoregressive_wrapper.py +0 -0
  15. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers/xval.py +0 -0
  16. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers.egg-info/SOURCES.txt +0 -0
  17. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers.egg-info/dependency_links.txt +0 -0
  18. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers.egg-info/requires.txt +0 -0
  19. {x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers.egg-info/top_level.txt +0 -0
{x_transformers-1.29.0/x_transformers.egg-info → x_transformers-1.29.2}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: x-transformers
- Version: 1.29.0
+ Version: 1.29.2
  Summary: X-Transformers - Pytorch
  Home-page: https://github.com/lucidrains/x-transformers
  Author: Phil Wang
{x_transformers-1.29.0 → x_transformers-1.29.2}/README.md
@@ -693,7 +693,7 @@ model = TransformerWrapper(
  )
  ```

- If you wish to do something more sophisticated, say 3 layers, with each layer recurrent 4 times before onto the next, that is possible as well.
+ If you wish to do something more sophisticated, say 3 layers, with each layer recurrent 4 times before onto the next, that is possible as well. Be aware the `layers_execute_order` is 0-indexed

  ```python
  import torch
@@ -716,11 +716,6 @@ model = TransformerWrapper(
  )
  )
  )
-
- x = torch.randint(0, 256, (1, 1024))
-
- model(x) # (1, 1024, 20000)
-
  ```

  ### Understanding and Improving Transformer From a Multi-Particle Dynamic System Point of View
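The README hunks above only show the edges of the recurrent-layers example, so for context here is a minimal sketch of what a "3 layers, each recurrent 4 times" configuration could look like. The dimensions are illustrative, and the index layout assumes each decoder layer contributes an (attention, feedforward) pair, so a depth-3 decoder exposes sub-layers 0 through 5; only the 0-indexing of `layers_execute_order` is stated by the diff itself.

```python
import torch
from x_transformers import TransformerWrapper, Decoder

# Illustrative sketch (dims and index layout assumed, not taken from the diff):
# 3 decoder layers, each executed 4 times before moving on to the next.
model = TransformerWrapper(
    num_tokens = 20000,
    max_seq_len = 1024,
    attn_layers = Decoder(
        dim = 512,
        depth = 3,
        heads = 8,
        layers_execute_order = (
            *((0, 1) * 4),   # layer 1 (attention, feedforward), recurrent 4 times
            *((2, 3) * 4),   # layer 2, recurrent 4 times
            *((4, 5) * 4),   # layer 3, recurrent 4 times
        )
    )
)

x = torch.randint(0, 256, (1, 1024))
logits = model(x)  # (1, 1024, 20000)
```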
{x_transformers-1.29.0 → x_transformers-1.29.2}/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
  setup(
    name = 'x-transformers',
    packages = find_packages(exclude=['examples']),
-   version = '1.29.0',
+   version = '1.29.2',
    license='MIT',
    description = 'X-Transformers - Pytorch',
    author = 'Phil Wang',
{x_transformers-1.29.0 → x_transformers-1.29.2}/x_transformers/x_transformers.py
@@ -1059,7 +1059,6 @@ class AttentionLayers(Module):
          dim_head = attn_kwargs.get('dim_head', DEFAULT_DIM_HEAD)

          self.dim = dim
-         self.depth = depth
          self.causal = causal
          self.layers = ModuleList([])

@@ -1179,6 +1178,11 @@ class AttentionLayers(Module):

          self.num_attn_layers = len(list(filter(equals('a'), layer_types)))

+         # set the depth
+
+         depth = default(depth, len(self.layers_execute_order))
+         self.depth = depth
+
          # stochastic depth

          self.layer_dropouts = cast_tuple(layer_dropout, len(layer_types))
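Taken together, the two hunks above move the assignment of `self.depth` to after the execution order is known: assuming `default(value, fallback)` has its usual meaning in this codebase (return the value unless it is None), `depth` now falls back to the length of `layers_execute_order` when it is not passed explicitly. A standalone sketch of that fallback, using a hypothetical helper name:

```python
# Hypothetical standalone helper mirroring
# `depth = default(depth, len(self.layers_execute_order))` from the hunk above.
def resolve_depth(depth, layers_execute_order):
    return depth if depth is not None else len(layers_execute_order)

print(resolve_depth(None, (0, 1, 0, 1, 0, 1)))  # 6 -> depth inferred from the execution order
print(resolve_depth(3, (0, 1, 0, 1, 0, 1)))     # 3 -> an explicit depth still wins
```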
{x_transformers-1.29.0 → x_transformers-1.29.2/x_transformers.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: x-transformers
- Version: 1.29.0
+ Version: 1.29.2
  Summary: X-Transformers - Pytorch
  Home-page: https://github.com/lucidrains/x-transformers
  Author: Phil Wang