x-transformers 2.11.14__py3-none-any.whl → 2.11.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of x-transformers might be problematic.
- x_transformers/gpt_vae.py +4 -1
- {x_transformers-2.11.14.dist-info → x_transformers-2.11.15.dist-info}/METADATA +1 -1
- {x_transformers-2.11.14.dist-info → x_transformers-2.11.15.dist-info}/RECORD +5 -5
- {x_transformers-2.11.14.dist-info → x_transformers-2.11.15.dist-info}/WHEEL +0 -0
- {x_transformers-2.11.14.dist-info → x_transformers-2.11.15.dist-info}/licenses/LICENSE +0 -0
x_transformers/gpt_vae.py
CHANGED
@@ -174,11 +174,14 @@ class GPTVAE(Module):
     def forward(
         self,
         seq,
+        seq_for_latents = None,
         return_all_losses = False
     ):
         batch, device = seq.shape[0], seq.device

-        latents, (latents_mean, latents_log_var) = self.encode_to_latents(seq, return_mean_log_var = True)
+        seq_for_latents = default(seq_for_latents, seq)
+
+        latents, (latents_mean, latents_log_var) = self.encode_to_latents(seq_for_latents, return_mean_log_var = True)

         dropped_latents = ~self.latents_dropout(torch.ones((batch,), device = device)).bool()

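The substantive change in 2.11.15: GPTVAE.forward() gains an optional seq_for_latents argument, so the VAE latents can be encoded from a different sequence than the one being decoded. Omitting it falls back to seq, preserving the old behavior via the default helper used across x-transformers. A minimal sketch of that fallback (the tensor shapes below are hypothetical, for illustration only):

import torch

# the `default` helper as used across x-transformers: return the value
# if the caller supplied one, else fall back to the given default
def default(val, d):
    return val if val is not None else d

seq = torch.randint(0, 256, (2, 1024))  # hypothetical token batch

# no separate sequence supplied -> latents are encoded from `seq` (old behavior)
assert default(None, seq) is seq

# a separate sequence supplied -> latents are encoded from it instead
seq_for_latents = torch.randint(0, 256, (2, 1024))
assert default(seq_for_latents, seq) is seq_for_latents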
{x_transformers-2.11.14.dist-info → x_transformers-2.11.15.dist-info}/RECORD
CHANGED
@@ -6,7 +6,7 @@ x_transformers/continuous.py,sha256=WwpQCjyVY4PtuEAOFY68zqgklbF9I7AL5w6874YlDe8,
 x_transformers/dpo.py,sha256=xt4OuOWhU8pN3OKN2LZAaC2NC8iiEnchqqcrPWVqf0o,3521
 x_transformers/entropy_based_tokenizer.py,sha256=F2lO8-v3aLIcVDVNhu7RR-UtRdlmaaYJzBK9m7OnLE8,5018
 x_transformers/free_transformer.py,sha256=a_sF_tx2RgKNsPCum22jlYam28OWEk3B0o1D4-Vo9Fw,10714
-x_transformers/gpt_vae.py,sha256=
+x_transformers/gpt_vae.py,sha256=1zyjwgfZr6CRDsh5VMCPSdoCPg-sdX5mXmZ_mn4VyYQ,6082
 x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg,9252
 x_transformers/neo_mlp.py,sha256=XCNnnop9WLarcxap1kGuYc1x8GHvwkZiDRnXOxSl3Po,3452
 x_transformers/nonautoregressive_wrapper.py,sha256=hMQqNimGtchNIe13cR5LZule1V7I1qM5LmY8VQfVdnA,11698
@@ -14,7 +14,7 @@ x_transformers/up_wrapper.py,sha256=YC2LN14_7Xx9Wtiek2rtEJ_qHqdfSmKlh3d7Cgxwd80,
 x_transformers/x_transformers.py,sha256=5ctPu8tvlbUMrtW360e_LPnoGv6xcgQFsyWdbvLo6Tk,127002
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=AwwYUm8yDAtKQyKJDIhYMsiLTJ_skh3scUFMjp5sda8,8597
-x_transformers-2.11.
-x_transformers-2.11.
-x_transformers-2.11.
-x_transformers-2.11.
+x_transformers-2.11.15.dist-info/METADATA,sha256=DSY5ug0mmywhOVxsCxjVkIzyWNY9ot4kmUxBFresdaE,96012
+x_transformers-2.11.15.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+x_transformers-2.11.15.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-2.11.15.dist-info/RECORD,,
{x_transformers-2.11.14.dist-info → x_transformers-2.11.15.dist-info}/WHEEL
File without changes
{x_transformers-2.11.14.dist-info → x_transformers-2.11.15.dist-info}/licenses/LICENSE
File without changes