x-transformers 1.31.1__py3-none-any.whl → 1.31.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- x_transformers/x_transformers.py +2 -2
- {x_transformers-1.31.1.dist-info → x_transformers-1.31.3.dist-info}/METADATA +1 -1
- {x_transformers-1.31.1.dist-info → x_transformers-1.31.3.dist-info}/RECORD +6 -6
- {x_transformers-1.31.1.dist-info → x_transformers-1.31.3.dist-info}/LICENSE +0 -0
- {x_transformers-1.31.1.dist-info → x_transformers-1.31.3.dist-info}/WHEEL +0 -0
- {x_transformers-1.31.1.dist-info → x_transformers-1.31.3.dist-info}/top_level.txt +0 -0
x_transformers/x_transformers.py
CHANGED
@@ -1234,7 +1234,7 @@ class AttentionLayers(Module):
|
|
1234
1234
|
use_adaptive_layernorm = False,
|
1235
1235
|
use_adaptive_rmsnorm = False,
|
1236
1236
|
use_adaptive_layerscale = False, # paired with use_adaptive_layernorm for ada-ln-zero from DiT paper
|
1237
|
-
norm_add_unit_offset =
|
1237
|
+
norm_add_unit_offset = True,
|
1238
1238
|
dim_condition = None,
|
1239
1239
|
adaptive_condition_mlp = False,
|
1240
1240
|
adaptive_condition_mlp_expansion = 4,
|
@@ -1403,7 +1403,7 @@ class AttentionLayers(Module):
|
|
1403
1403
|
|
1404
1404
|
self.post_branch_fn_needs_condition = post_branch_fn_needs_condition
|
1405
1405
|
|
1406
|
-
if not post_branch_fn_needs_condition and norm_add_unit_offset:
|
1406
|
+
if exists(post_branch_fn) and not post_branch_fn_needs_condition and norm_add_unit_offset:
|
1407
1407
|
post_branch_fn = partial(post_branch_fn, unit_offset = 1.)
|
1408
1408
|
|
1409
1409
|
# setup mlp for conditioning
|
@@ -4,11 +4,11 @@ x_transformers/autoregressive_wrapper.py,sha256=uX8Mb0zLsQrZECt_9UGt35g7tC05Rk3n
|
|
4
4
|
x_transformers/continuous.py,sha256=WO52n9lFAXv5-SGadi2cApGF8dkouN8QSTEOuC7erj8,6180
|
5
5
|
x_transformers/dpo.py,sha256=LjvWgCkqTl-UuehrzQ8nkX5guLr4whYwsmm7SKSwdls,3450
|
6
6
|
x_transformers/nonautoregressive_wrapper.py,sha256=ys_p8obc7lTeeodCqvkRKxOXQ1C9T3j5Jwr-JbVgnXk,10432
|
7
|
-
x_transformers/x_transformers.py,sha256=
|
7
|
+
x_transformers/x_transformers.py,sha256=D9l3jL1D0RzHynIJcSUpxQec1n-7cHgRQZNDdDoYCFQ,75832
|
8
8
|
x_transformers/xl_autoregressive_wrapper.py,sha256=DCx4n0_c1tFai4nOqaWVnqx2p9eutsZsDMiMP1ckxNU,4117
|
9
9
|
x_transformers/xval.py,sha256=QE1ltYZTR_eGgIHPP2BrMWVWVLqMW-OpDZh87BSmQEg,8563
|
10
|
-
x_transformers-1.31.1.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
|
11
|
-
x_transformers-1.31.
|
12
|
-
x_transformers-1.31.1.dist-info/WHEEL,sha256=mguMlWGMX-VHnMpKOjjQidIo1ssRlCFu4a4mBpz1s2M,91
|
13
|
-
x_transformers-1.31.1.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
|
14
|
-
x_transformers-1.31.1.dist-info/RECORD,,
|
10
|
+
x_transformers-1.31.3.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
|
11
|
+
x_transformers-1.31.3.dist-info/METADATA,sha256=cS3vVeEi3fSoYqXWDkW6FJhgmX11vrfQr9Lc3kMT9MI,661
|
12
|
+
x_transformers-1.31.3.dist-info/WHEEL,sha256=mguMlWGMX-VHnMpKOjjQidIo1ssRlCFu4a4mBpz1s2M,91
|
13
|
+
x_transformers-1.31.3.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
|
14
|
+
x_transformers-1.31.3.dist-info/RECORD,,
|
File without changes
|
File without changes
|
File without changes
|