x-transformers 2.3.11-py3-none-any.whl → 2.3.12-py3-none-any.whl

This diff compares two publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the registry.
x_transformers/x_transformers.py
@@ -1981,6 +1981,7 @@ class AttentionLayers(Module):
         learned_value_residual_mix = True, # seeing big improvements when the value residual mix value is learned per token - credit goes to @faresobeid for taking the first step with learned scalar mix, then @Blinkdl for taking it a step further with data dependent. here we will use per token learned
         rel_pos_kwargs: dict = dict(),
         residual_fn_kwargs: dict = dict(),
+        verbose = True,
         **kwargs
     ):
         super().__init__()
@@ -2032,7 +2033,7 @@ class AttentionLayers(Module):
 
         assert rotary_emb_dim <= dim_head, f'rotary emb dim {rotary_emb_dim} must be less than or equal to attention head dimension {dim_head}'
 
-        if rotary_emb_dim < 32:
+        if verbose and rotary_emb_dim < 32:
             logger.warning('when training language model, rotary embedding dimension should be at least 32')
 
         assert not (rotary_xpos and not causal), 'rotary xpos is not compatible with bidirectional attention'
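The only functional change in this release is the new `verbose` flag on `AttentionLayers` (default `True`), which gates the rotary-embedding-dimension warning shown above. A minimal usage sketch, assuming the standard `TransformerWrapper`/`Decoder` kwargs from x-transformers; the small `rotary_emb_dim` here is chosen only to exercise the warning path:

    # hypothetical usage of the new flag; rotary_emb_dim = 16 is deliberately
    # below 32, so 2.3.11 would always log the warning, while verbose = False
    # (new in 2.3.12) keeps construction silent
    from x_transformers import TransformerWrapper, Decoder

    model = TransformerWrapper(
        num_tokens = 256,
        max_seq_len = 1024,
        attn_layers = Decoder(
            dim = 512,
            depth = 6,
            heads = 8,
            rotary_pos_emb = True,
            rotary_emb_dim = 16,   # below the recommended minimum of 32
            verbose = False        # new in 2.3.12: suppresses the warning
        )
    )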
x_transformers-2.3.11.dist-info/METADATA → x_transformers-2.3.12.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: x-transformers
-Version: 2.3.11
+Version: 2.3.12
 Summary: X-Transformers
 Project-URL: Homepage, https://pypi.org/project/x-transformers/
 Project-URL: Repository, https://github.com/lucidrains/x-transformers
x_transformers-2.3.11.dist-info/RECORD → x_transformers-2.3.12.dist-info/RECORD
@@ -8,10 +8,10 @@ x_transformers/entropy_based_tokenizer.py,sha256=F2lO8-v3aLIcVDVNhu7RR-UtRdlmaaY
 x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg,9252
 x_transformers/neo_mlp.py,sha256=XCNnnop9WLarcxap1kGuYc1x8GHvwkZiDRnXOxSl3Po,3452
 x_transformers/nonautoregressive_wrapper.py,sha256=2NU58hYMgn-4Jzg3mie-mXb0XH_dCN7fjlzd3K1rLUY,10510
-x_transformers/x_transformers.py,sha256=Wo5hauzdn4Q9PUVjBqQo-1vCq08BT2jYUDbq3r2a5Go,114061
+x_transformers/x_transformers.py,sha256=ZfOXrZSiy2jlZ8wVmDdMTLW4hAY_qfmPQHW9t2ABxbo,114097
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=7S00kCuab4tWQa-vf-z-XfzADjVj48MoFIr7VSIvttg,8575
-x_transformers-2.3.11.dist-info/METADATA,sha256=cDJW4GNyTXb0gKUm-Z_T8Bk1Jrk8HJU35v6tldG2IJo,89022
-x_transformers-2.3.11.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-x_transformers-2.3.11.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
-x_transformers-2.3.11.dist-info/RECORD,,
+x_transformers-2.3.12.dist-info/METADATA,sha256=ADkMqVrlqcYcMk8U_9oxqOHc4XRD1NbSfVJsGhy5R8s,89022
+x_transformers-2.3.12.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+x_transformers-2.3.12.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-2.3.12.dist-info/RECORD,,
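For context on the RECORD changes above: each RECORD line has the form `path,sha256=<digest>,<size>`, where the digest is the urlsafe base64 encoding of the file's raw SHA-256 with trailing `=` padding stripped (per PEP 376 / PEP 427). A minimal sketch, not part of the package, for recomputing an entry; the helper name is hypothetical:

    # hypothetical helper for reproducing a wheel RECORD entry from a file
    import base64, hashlib

    def record_entry(path):
        with open(path, 'rb') as f:
            data = f.read()
        # urlsafe base64 of the raw sha256 digest, '=' padding stripped
        digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest())
        return f"{path},sha256={digest.rstrip(b'=').decode()},{len(data)}"

    # record_entry('x_transformers/x_transformers.py') should reproduce the
    # new line above, ending in ...hAY_qfmPQHW9t2ABxbo,114097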