x-transformers 1.32.8__py3-none-any.whl → 1.32.9__py3-none-any.whl
This diff shows the changes between publicly available versions of the package, as released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
- x_transformers/x_transformers.py +0 -5
- {x_transformers-1.32.8.dist-info → x_transformers-1.32.9.dist-info}/METADATA +1 -1
- {x_transformers-1.32.8.dist-info → x_transformers-1.32.9.dist-info}/RECORD +6 -6
- {x_transformers-1.32.8.dist-info → x_transformers-1.32.9.dist-info}/LICENSE +0 -0
- {x_transformers-1.32.8.dist-info → x_transformers-1.32.9.dist-info}/WHEEL +0 -0
- {x_transformers-1.32.8.dist-info → x_transformers-1.32.9.dist-info}/top_level.txt +0 -0
x_transformers/x_transformers.py
CHANGED
@@ -1677,8 +1677,6 @@ class AttentionLayers(Module):
 
         layer_variables = tuple(tuple(layer_variable[i] for i in layers_execute_order) for layer_variable in layer_variables)
 
-        first_skip = None
-
         # go through the attention and feedforward layers
 
         for ind, (layer_type, (norm, block, residual_fn), layer_dropout) in enumerate(zip(*layer_variables)):
@@ -1687,9 +1685,6 @@ class AttentionLayers(Module):
             if self.training and layer_dropout > 0. and random() < layer_dropout:
                 continue
 
-            if ind == 1:
-                first_skip = x.clone()
-
             if layer_type == 'a':
                 if return_hiddens:
                     hiddens.append(x)
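The entire source change in 1.32.9 is the deletion above: `first_skip` was assigned a clone of the activations after the first layer but, at least in the code visible in this diff, never read again, so removing it drops dead state rather than changing behavior. For readers unfamiliar with the surrounding loop, here is a minimal sketch of the pattern it implements; `SketchLayers` and its plain linear blocks are hypothetical stand-ins, not the library's actual `AttentionLayers`, which threads norms, residual functions, and much more state through the same loop.

```python
from random import random

import torch
from torch import nn

class SketchLayers(nn.Module):
    # Hedged, stripped-down stand-in for the layer loop shown in the diff,
    # after the unused `first_skip` bookkeeping was removed.
    def __init__(self, blocks, layer_dropout = 0.):
        super().__init__()
        self.blocks = nn.ModuleList(blocks)
        self.layer_dropout = layer_dropout

    def forward(self, x):
        for block in self.blocks:
            # stochastic depth, matching the guard above: during training,
            # each layer is skipped independently with probability layer_dropout
            if self.training and self.layer_dropout > 0. and random() < self.layer_dropout:
                continue
            x = block(x) + x  # residual connection around every block
        return x

layers = SketchLayers([nn.Linear(8, 8) for _ in range(4)], layer_dropout = 0.1)
out = layers(torch.randn(2, 8))  # -> shape (2, 8)
```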
{x_transformers-1.32.8.dist-info → x_transformers-1.32.9.dist-info}/RECORD
CHANGED
@@ -5,11 +5,11 @@ x_transformers/continuous.py,sha256=cIVEdhfei258__ziV7kQBrJMxCel54bExBTDrO9rfCI,
 x_transformers/dpo.py,sha256=LjvWgCkqTl-UuehrzQ8nkX5guLr4whYwsmm7SKSwdls,3450
 x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg,9252
 x_transformers/nonautoregressive_wrapper.py,sha256=ys_p8obc7lTeeodCqvkRKxOXQ1C9T3j5Jwr-JbVgnXk,10432
-x_transformers/x_transformers.py,sha256=
+x_transformers/x_transformers.py,sha256=8558TPHcDxWUvJYz01EdeyZl0lkHB14bzlsEMwSMPyw,77300
 x_transformers/xl_autoregressive_wrapper.py,sha256=DCx4n0_c1tFai4nOqaWVnqx2p9eutsZsDMiMP1ckxNU,4117
 x_transformers/xval.py,sha256=QE1ltYZTR_eGgIHPP2BrMWVWVLqMW-OpDZh87BSmQEg,8563
-x_transformers-1.32.
-x_transformers-1.32.
-x_transformers-1.32.
-x_transformers-1.32.
-x_transformers-1.32.
+x_transformers-1.32.9.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-1.32.9.dist-info/METADATA,sha256=-GidCdPhcKpZ49ElbeuJUPko5LZZP_vyEodaN_P3g48,661
+x_transformers-1.32.9.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+x_transformers-1.32.9.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
+x_transformers-1.32.9.dist-info/RECORD,,
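Each RECORD row is `path,sha256=<digest>,<size>`: the digest is the urlsafe-base64 sha256 of the file with trailing `=` padding stripped, the size is in bytes, and RECORD itself is listed with empty hash and size fields since it cannot hash itself. A small sketch for recomputing a row, stdlib only; the path is assumed to point into an unpacked copy of the 1.32.9 wheel:

```python
import base64
import hashlib

def record_row(path):
    # Rebuild a wheel RECORD row: urlsafe-base64 sha256 digest with '='
    # padding stripped, followed by the file size in bytes.
    data = open(path, 'rb').read()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b'=')
    return f"{path},sha256={digest.decode()},{len(data)}"

# Expected output for the 1.32.9 wheel, per the diff above:
# x_transformers/x_transformers.py,sha256=8558TPHcDxWUvJYz01EdeyZl0lkHB14bzlsEMwSMPyw,77300
print(record_row('x_transformers/x_transformers.py'))
```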
{x_transformers-1.32.8.dist-info → x_transformers-1.32.9.dist-info}/LICENSE
File without changes

{x_transformers-1.32.8.dist-info → x_transformers-1.32.9.dist-info}/WHEEL
File without changes

{x_transformers-1.32.8.dist-info → x_transformers-1.32.9.dist-info}/top_level.txt
File without changes
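With LICENSE, WHEEL, and top_level.txt byte-identical between the wheels, this release amounts to the dead-code removal above plus the one-line METADATA change (presumably the version bump). It can be picked up with an exact pin, e.g. `pip install -U x-transformers==1.32.9`.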