x-transformers 1.27.16__py3-none-any.whl → 1.27.18__py3-none-any.whl

This diff shows the content changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
x_transformers/attend.py CHANGED
@@ -84,7 +84,6 @@ class Attend(nn.Module):
     ):
         super().__init__()
         self.scale = scale
-        self.qk_norm = qk_norm
 
         self.causal = causal
         self.create_causal_mask = onnx_create_causal_mask if onnxable else create_causal_mask
@@ -139,7 +138,7 @@ class Attend(nn.Module):
 
         # handle scale - by default they scale by dim_head ** -0.5, but need to take care if using cosine sim attention
 
-        if self.qk_norm:
+        if exists(self.scale):
            default_scale = q.shape[-1] ** -0.5
            q = q * (self.scale / default_scale)
 
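Taken together, the two hunks above are a single change: the qk_norm flag is dropped from Attend's constructor, and the query rescaling in the forward pass now fires whenever an explicit scale was passed, not only when qk-norm was enabled. A minimal sketch of the resulting behavior, assuming the library's exists helper; apply_custom_scale is a hypothetical standalone version of the two lines in the hunk, not the library's API:

    import torch

    def exists(val):
        # x-transformers helper: was a value explicitly provided?
        return val is not None

    def apply_custom_scale(q, scale = None):
        # attention kernels apply the default dim_head ** -0.5 scaling on
        # their own, so a custom scale is folded into q ahead of time as
        # q * (scale / default_scale)
        if exists(scale):
            default_scale = q.shape[-1] ** -0.5
            q = q * (scale / default_scale)
        return q

    q = torch.randn(2, 8, 128, 64)        # (batch, heads, seq len, dim head)
    q = apply_custom_scale(q, scale = 10) # e.g. a fixed scale, as used with qk-norm setups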
x_transformers/continuous.py CHANGED
@@ -47,8 +47,10 @@ class ContinuousTransformerWrapper(nn.Module):
         self.max_seq_len = max_seq_len
 
         self.max_mem_len = max_mem_len
+
+        no_abs_pos_emb = max_seq_len == 0 or not (use_abs_pos_emb and not attn_layers.disable_abs_pos_emb)
 
-        if not (use_abs_pos_emb and not attn_layers.disable_abs_pos_emb):
+        if no_abs_pos_emb:
             self.pos_emb = always(0)
         elif scaled_sinu_pos_emb:
             self.pos_emb = ScaledSinusoidalEmbedding(dim)
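The new no_abs_pos_emb flag extends the existing opt-out: a max_seq_len of 0 now also disables the absolute positional embedding. A standalone sketch of just that predicate; the function wrapper is mine, the boolean expression is copied from the hunk:

    def no_abs_pos_emb(max_seq_len, use_abs_pos_emb, disable_abs_pos_emb):
        # the absolute positional embedding is skipped when the wrapper has
        # no fixed max length, when it is explicitly turned off, or when the
        # attention layers disable it themselves
        return max_seq_len == 0 or not (use_abs_pos_emb and not disable_abs_pos_emb)

    assert no_abs_pos_emb(0, True, False)         # new in this release: max_seq_len == 0
    assert no_abs_pos_emb(1024, False, False)     # existing opt-out still applies
    assert not no_abs_pos_emb(1024, True, False)  # default case keeps the embedding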
x_transformers-1.27.16.dist-info/METADATA → x_transformers-1.27.18.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: x-transformers
-Version: 1.27.16
+Version: 1.27.18
 Summary: X-Transformers - Pytorch
 Home-page: https://github.com/lucidrains/x-transformers
 Author: Phil Wang
x_transformers-1.27.16.dist-info/RECORD → x_transformers-1.27.18.dist-info/RECORD RENAMED
@@ -1,14 +1,14 @@
 x_transformers/__init__.py,sha256=0-2m0LtLpZiZYGwO-6OMYXofx5hbFb_FJOHMxIBqQr4,673
-x_transformers/attend.py,sha256=Y3PzYqD3G_x1bYPd6mlp27dp3obaum1O-TOOQaARctc,10188
+x_transformers/attend.py,sha256=L7vctHJ0PnECohu4cUu8yvY8cUrVyJxHmMFR0RGL0z4,10163
 x_transformers/autoregressive_wrapper.py,sha256=gYKIN5Rm8dMYSTX5yHpg9sPYyZf9rsRTJCNrYRdJ-Ww,9618
-x_transformers/continuous.py,sha256=92Wczoaz6dJalix-e3mdIzW0xyRIx3GlBSgsSQOsJeI,6123
+x_transformers/continuous.py,sha256=dpHK4NSMDQAJQ_N3Uj9rip0fYGXyu0QCCO_OfEdbRGs,6192
 x_transformers/dpo.py,sha256=LjvWgCkqTl-UuehrzQ8nkX5guLr4whYwsmm7SKSwdls,3450
 x_transformers/nonautoregressive_wrapper.py,sha256=AQLE4rA_Kh8VNoe9OzpwyeWson34sRkhks4dn4seNjI,10414
 x_transformers/x_transformers.py,sha256=hRU3L-8fyK-ftFb69Yr1xrOwVraqgram_6mNEWFcKNg,63641
 x_transformers/xl_autoregressive_wrapper.py,sha256=DCx4n0_c1tFai4nOqaWVnqx2p9eutsZsDMiMP1ckxNU,4117
 x_transformers/xval.py,sha256=ulEPep6i5Hl7H-H9vGfdsmHdprUmK8ajB306jViyV2c,8147
-x_transformers-1.27.16.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
-x_transformers-1.27.16.dist-info/METADATA,sha256=gIwssqunCEy8XgCnW0eOcaIviUmoudCnayy4qi2jn0I,662
-x_transformers-1.27.16.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-x_transformers-1.27.16.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
-x_transformers-1.27.16.dist-info/RECORD,,
+x_transformers-1.27.18.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.27.18.dist-info/METADATA,sha256=kKCnDS7zqGNAlBV_FHWR7b5o8Q1EzIbmU3krSpNIQK0,662
+x_transformers-1.27.18.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+x_transformers-1.27.18.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.27.18.dist-info/RECORD,,
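For reference, each RECORD line above has the form path,sha256=<digest>,<size>, where the digest is the unpadded urlsafe base64 SHA-256 of the file, per the wheel spec (PEP 427). A sketch of recomputing one entry; record_entry is a hypothetical helper, not part of the package:

    import base64, hashlib, pathlib

    def record_entry(path):
        # RECORD format: <path>,sha256=<urlsafe b64 digest, '=' padding stripped>,<size in bytes>
        data = pathlib.Path(path).read_bytes()
        digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
        return f"{path},sha256={digest},{len(data)}"

    # e.g. record_entry("x_transformers/attend.py") should reproduce that file's line above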