x-transformers 1.27.8__py3-none-any.whl → 1.27.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
x_transformers/attend.py CHANGED
@@ -132,10 +132,10 @@ class Attend(nn.Module):
         # kv shape torch.Size([1, 512, 64]) -> torch.Size([1, 8, 512, 64])

         if k.ndim == 3:
-            k = rearrange(k, 'b ... -> b 1 ...').expand_as(q)
+            k = repeat(k, 'b ... -> b h ...', h = q.shape[1])

         if v.ndim == 3:
-            v = rearrange(v, 'b ... -> b 1 ...').expand_as(q)
+            v = repeat(v, 'b ... -> b h ...', h = q.shape[1])

         # handle scale - by default they scale by dim_head ** -0.5, but need to take care if using cosine sim attention

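Both forms produce the same values. The old code inserted a singleton head axis and broadcast it across q's head dimension with expand_as, which yields a zero-stride view; the new code uses einops repeat to materialize an explicit, contiguous copy per head, trading a little memory for a layout that downstream kernels can consume without stride surprises. A minimal standalone sketch of the difference, assuming only torch and einops (shapes mirror the comment in the hunk):

    import torch
    from einops import rearrange, repeat

    q = torch.randn(1, 8, 512, 64)   # (batch, heads, seq, dim_head)
    k = torch.randn(1, 512, 64)      # single-headed kv: (batch, seq, dim_head)

    # old: singleton head axis broadcast to q's shape - a zero-stride view
    k_view = rearrange(k, 'b ... -> b 1 ...').expand_as(q)

    # new: explicit head axis, materialized as a contiguous copy
    k_copy = repeat(k, 'b ... -> b h ...', h = q.shape[1])

    assert torch.equal(k_view, k_copy)   # identical values either way
    assert not k_view.is_contiguous()    # expand_as returns a non-contiguous view
    assert k_copy.is_contiguous()        # repeat allocates real memory per head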
x_transformers/x_transformers.py CHANGED
@@ -1457,7 +1457,7 @@ class ViTransformerWrapper(nn.Module):
             LayerNorm(dim)
         )

-        LayerNorm(dim) if post_emb_norm else nn.Identity()
+        self.post_emb_norm = LayerNorm(dim) if post_emb_norm else nn.Identity()
         self.dropout = nn.Dropout(emb_dropout)

         self.attn_layers = attn_layers
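The left-hand line constructs a LayerNorm and immediately discards it: with no assignment, the module is never registered on the wrapper, so it can neither be applied in forward nor tracked as a parameter. A minimal sketch of the registration difference, using torch's nn.LayerNorm for self-containment (x-transformers defines its own LayerNorm):

    import torch.nn as nn

    class Broken(nn.Module):
        def __init__(self, dim, post_emb_norm = True):
            super().__init__()
            # bare expression: built, then discarded - nothing lands on self
            nn.LayerNorm(dim) if post_emb_norm else nn.Identity()

    class Fixed(nn.Module):
        def __init__(self, dim, post_emb_norm = True):
            super().__init__()
            # attribute assignment registers the submodule and its parameters
            self.post_emb_norm = nn.LayerNorm(dim) if post_emb_norm else nn.Identity()

    print(sum(p.numel() for p in Broken(64).parameters()))  # 0 - norm weights lost
    print(sum(p.numel() for p in Fixed(64).parameters()))   # 128 - weight + bias

The 21-byte size increase for x_transformers.py recorded in the RECORD hunk below matches the length of the added "self.post_emb_norm = " prefix exactly.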
x_transformers-1.27.8.dist-info/METADATA → x_transformers-1.27.10.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.27.8
+Version: 1.27.10
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
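A quick way to confirm the upgrade took effect is to read the same Version field back from the installed distribution via the standard library:

    from importlib.metadata import version
    print(version('x-transformers'))  # '1.27.10' once the new wheel is installed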
x_transformers-1.27.8.dist-info/RECORD → x_transformers-1.27.10.dist-info/RECORD RENAMED
@@ -1,14 +1,14 @@
 x_transformers/__init__.py,sha256=0-2m0LtLpZiZYGwO-6OMYXofx5hbFb_FJOHMxIBqQr4,673
-x_transformers/attend.py,sha256=MFl_FbgPsm9mziZPTi_s8QbxASETwbGeciMH8sUIwT8,10188
+x_transformers/attend.py,sha256=Y3PzYqD3G_x1bYPd6mlp27dp3obaum1O-TOOQaARctc,10188
 x_transformers/autoregressive_wrapper.py,sha256=gYKIN5Rm8dMYSTX5yHpg9sPYyZf9rsRTJCNrYRdJ-Ww,9618
 x_transformers/continuous.py,sha256=92Wczoaz6dJalix-e3mdIzW0xyRIx3GlBSgsSQOsJeI,6123
 x_transformers/dpo.py,sha256=ek9dgiSs05xeCn8ORceOgKy6LJOnNDw-OJDqxAVLecM,2243
 x_transformers/nonautoregressive_wrapper.py,sha256=AQLE4rA_Kh8VNoe9OzpwyeWson34sRkhks4dn4seNjI,10414
-x_transformers/x_transformers.py,sha256=c8axLT-n2zz3mvQ1tBbE4KUs-8qL7yFsgtIujyh1JDg,63408
+x_transformers/x_transformers.py,sha256=TPH5PitIzIBWTQdnO8nlctB8poSMvHkBPWcWFolgZAM,63429
 x_transformers/xl_autoregressive_wrapper.py,sha256=DCx4n0_c1tFai4nOqaWVnqx2p9eutsZsDMiMP1ckxNU,4117
 x_transformers/xval.py,sha256=ulEPep6i5Hl7H-H9vGfdsmHdprUmK8ajB306jViyV2c,8147
-x_transformers-1.27.8.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
-x_transformers-1.27.8.dist-info/METADATA,sha256=LYDYUsXQOHYBZRr_5pepdN9HSzaW-2nFX5pEzEOFkcA,661
-x_transformers-1.27.8.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-x_transformers-1.27.8.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
-x_transformers-1.27.8.dist-info/RECORD,,
+x_transformers-1.27.10.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.27.10.dist-info/METADATA,sha256=v2ZVeG1yd-HPYFbBWYjNL-q4s74asgt7U8VWg4f9Leg,662
+x_transformers-1.27.10.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+x_transformers-1.27.10.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.27.10.dist-info/RECORD,,
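Each RECORD row is path, hash, size in bytes. Per the wheel spec (PEP 427), the hash field is the urlsafe base64 encoding of the file's SHA-256 digest with trailing '=' padding stripped, so the values above can be reproduced from an unpacked wheel. A short sketch:

    import base64, hashlib

    def record_hash(path):
        # urlsafe base64 of the sha256 digest, '=' padding stripped (PEP 427)
        digest = hashlib.sha256(open(path, 'rb').read()).digest()
        return 'sha256=' + base64.urlsafe_b64encode(digest).rstrip(b'=').decode()

    # e.g. record_hash('x_transformers/attend.py') on the 1.27.10 wheel
    # -> 'sha256=Y3PzYqD3G_x1bYPd6mlp27dp3obaum1O-TOOQaARctc'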