x-transformers 1.35.2-py3-none-any.whl → 1.35.3-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
x_transformers/x_transformers.py

@@ -920,6 +920,7 @@ class Attention(Module):
         kv_heads = None,
         shared_kv = False,
         value_dim_head = None,
+        dim_out = None,
         tensor_product = False, # https://arxiv.org/abs/2208.06061
         add_zero_kv = False, # same as add_zero_attn in pytorch
         rotary_embed_values = False,
@@ -1057,7 +1058,11 @@ class Attention(Module):
         # attention on attention

         self.attn_on_attn = on_attn
-        self.to_out = nn.Sequential(nn.Linear(out_dim, dim * 2, bias = False), nn.GLU()) if on_attn else nn.Linear(out_dim, dim, bias = False)
+
+        # output dimension by default same as input, but can be overridden
+
+        dim_out = default(dim_out, dim)
+        self.to_out = nn.Sequential(nn.Linear(out_dim, dim_out * 2, bias = False), nn.GLU()) if on_attn else nn.Linear(out_dim, dim_out, bias = False)

         # whether to rotate positions into values, for absolute positions in addition to relative

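The only functional change in this release is the new dim_out argument to Attention. default(dim_out, dim) is the library's first-non-None helper, so the final projection nn.Linear(out_dim, dim_out) keeps its previous width of dim unless a caller overrides it, and the on_attn branch widens to dim_out * 2 before the GLU halves it back. A minimal usage sketch, assuming Attention is importable from the top-level x_transformers package and that its forward returns an (out, intermediates) tuple as in this version of the library:

import torch
from x_transformers import Attention  # top-level import path assumed

# read 512-dim tokens, but project the attention output down to
# 256 dims via the dim_out argument introduced in 1.35.3
attn = Attention(
    dim = 512,      # input dimension (before 1.35.3, also the forced output width)
    heads = 8,
    dim_out = 256,  # new: overrides the width of the final output projection
)

x = torch.randn(2, 1024, 512)
out, _ = attn(x)    # forward assumed to return (out, intermediates)
print(out.shape)    # expected: torch.Size([2, 1024, 256])

Leaving dim_out unset falls back to dim via default(dim_out, dim), so existing callers are unaffected.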
x_transformers-1.35.3.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.35.2
+Version: 1.35.3
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
x_transformers-1.35.3.dist-info/RECORD

@@ -5,11 +5,11 @@ x_transformers/continuous.py,sha256=cIVEdhfei258__ziV7kQBrJMxCel54bExBTDrO9rfCI,
 x_transformers/dpo.py,sha256=LjvWgCkqTl-UuehrzQ8nkX5guLr4whYwsmm7SKSwdls,3450
 x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg,9252
 x_transformers/nonautoregressive_wrapper.py,sha256=ys_p8obc7lTeeodCqvkRKxOXQ1C9T3j5Jwr-JbVgnXk,10432
-x_transformers/x_transformers.py,sha256=mpA9hriHqCXLckdlVo8sxzXT6sjxwsY6AaKoP-Rpw3c,80631
+x_transformers/x_transformers.py,sha256=ma5_LbZf5UvfKYJUJcqceUdFG8THFVzER9ZrDXKVV7Y,80780
 x_transformers/xl_autoregressive_wrapper.py,sha256=DCx4n0_c1tFai4nOqaWVnqx2p9eutsZsDMiMP1ckxNU,4117
 x_transformers/xval.py,sha256=QE1ltYZTR_eGgIHPP2BrMWVWVLqMW-OpDZh87BSmQEg,8563
-x_transformers-1.35.2.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
-x_transformers-1.35.2.dist-info/METADATA,sha256=4UuWPhkRRayYadZ8kwaHyqEGhHurnWJGRbPTzDMdEZo,661
-x_transformers-1.35.2.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
-x_transformers-1.35.2.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
-x_transformers-1.35.2.dist-info/RECORD,,
+x_transformers-1.35.3.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.35.3.dist-info/METADATA,sha256=YEiRJvu5g17ZVT3saNBhrmpNeRLqPXyN0cBdajt3psM,661
+x_transformers-1.35.3.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+x_transformers-1.35.3.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.35.3.dist-info/RECORD,,