x-transformers 2.3.6__py3-none-any.whl → 2.3.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1208,7 +1208,7 @@ class FeedForward(Module):
         sublayer_dropout = 0.,
         no_bias = False,
         zero_init_output = False,
-        deep_embed_hiddens = False,
+        deep_embed = False,
         deep_embed_num_tokens = None,
     ):
         super().__init__()
@@ -1249,9 +1249,9 @@ class FeedForward(Module):
         # improvements were clearest to me (on my toy setup) with multiplying on output of feedforward, will try with attention at future date

         self.deep_embed = None
-        if deep_embed_hiddens:
+        if deep_embed:
             assert exists(deep_embed_num_tokens)
-            self.deep_embed = nn.Parameter(torch.zeros(deep_embed_num_tokens, dim_out))
+            self.deep_embed = nn.Parameter(torch.ones(deep_embed_num_tokens, dim_out))

         # init last linear layer to 0

@@ -1266,7 +1266,7 @@ class FeedForward(Module):
         out = self.ff(x)

         if exists(deep_embed_ids) and exists(self.deep_embed):
-            deep_embed = self.deep_embed[deep_embed_ids] + 1.
+            deep_embed = self.deep_embed[deep_embed_ids]
             out = out * deep_embed

         return out
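The net effect of these three hunks: the constructor flag is renamed from deep_embed_hiddens to deep_embed, and the + 1. offset is folded into the parameter's initialization, so the embedding is stored as ones and used directly as a per-token multiplicative gate on the feedforward output. The two versions are numerically identical at init (a zeros parameter plus 1. equals a ones parameter); only the stored values shift by one, so 2.3.6 checkpoints for this parameter are not drop-in compatible. A minimal sketch of the new behavior, assuming a simplified feedforward (DeepEmbedFeedForward and its argument names are illustrative, not the library's actual API):

    import torch
    from torch import nn

    class DeepEmbedFeedForward(nn.Module):
        # hypothetical, simplified stand-in for x-transformers' FeedForward
        def __init__(self, dim, num_tokens, mult = 4):
            super().__init__()
            self.ff = nn.Sequential(
                nn.Linear(dim, dim * mult),
                nn.GELU(),
                nn.Linear(dim * mult, dim),
            )
            # 2.3.8 style: store ones directly
            # (2.3.6 stored zeros and added 1. at lookup time)
            self.deep_embed = nn.Parameter(torch.ones(num_tokens, dim))

        def forward(self, x, deep_embed_ids = None):
            out = self.ff(x)
            if deep_embed_ids is not None:
                # per-token multiplicative gate on the feedforward output
                out = out * self.deep_embed[deep_embed_ids]
            return out

    ff = DeepEmbedFeedForward(dim = 8, num_tokens = 100)
    x = torch.randn(2, 5, 8)
    ids = torch.randint(0, 100, (2, 5))
    # at init the gate is all ones, so gated and ungated outputs match
    assert torch.allclose(ff(x, ids), ff(x))

Starting from an all-ones gate means an untrained model behaves exactly as if deep embeds were disabled; training then learns per-token scales that deviate from identity.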
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: x-transformers
-Version: 2.3.6
+Version: 2.3.8
 Summary: X-Transformers
 Project-URL: Homepage, https://pypi.org/project/x-transformers/
 Project-URL: Repository, https://github.com/lucidrains/x-transformers
@@ -8,10 +8,10 @@ x_transformers/entropy_based_tokenizer.py,sha256=F2lO8-v3aLIcVDVNhu7RR-UtRdlmaaY
 x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg,9252
 x_transformers/neo_mlp.py,sha256=XCNnnop9WLarcxap1kGuYc1x8GHvwkZiDRnXOxSl3Po,3452
 x_transformers/nonautoregressive_wrapper.py,sha256=2NU58hYMgn-4Jzg3mie-mXb0XH_dCN7fjlzd3K1rLUY,10510
-x_transformers/x_transformers.py,sha256=kZKk80hxV0Pvmx1E745BR7c8YzB-S4u2cZHSMZvpZq8,113507
+x_transformers/x_transformers.py,sha256=-9YbiPLh7w408Dxe5sGxkaOCPx__itHqCP6af6KrsxY,113485
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=7S00kCuab4tWQa-vf-z-XfzADjVj48MoFIr7VSIvttg,8575
-x_transformers-2.3.6.dist-info/METADATA,sha256=Z337g7NRRYaKGbBHkKe1UZbIQJeXPk-dtZ4aBiVvSH8,89021
-x_transformers-2.3.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-x_transformers-2.3.6.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
-x_transformers-2.3.6.dist-info/RECORD,,
+x_transformers-2.3.8.dist-info/METADATA,sha256=KV2fYRQSAkf1dJwWvH4rKf8GFz-WMEhVkAcxUVu1fNk,89021
+x_transformers-2.3.8.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+x_transformers-2.3.8.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-2.3.8.dist-info/RECORD,,