x-transformers 1.30.19__py3-none-any.whl → 1.30.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- x_transformers/x_transformers.py +8 -7
- {x_transformers-1.30.19.dist-info → x_transformers-1.30.20.dist-info}/METADATA +1 -1
- {x_transformers-1.30.19.dist-info → x_transformers-1.30.20.dist-info}/RECORD +6 -6
- {x_transformers-1.30.19.dist-info → x_transformers-1.30.20.dist-info}/LICENSE +0 -0
- {x_transformers-1.30.19.dist-info → x_transformers-1.30.20.dist-info}/WHEEL +0 -0
- {x_transformers-1.30.19.dist-info → x_transformers-1.30.20.dist-info}/top_level.txt +0 -0
x_transformers/x_transformers.py
CHANGED
@@ -1271,18 +1271,11 @@ class AttentionLayers(Module):
|
|
1271
1271
|
|
1272
1272
|
need_condition = False
|
1273
1273
|
dim_condition = default(dim_condition, dim)
|
1274
|
-
|
1275
|
-
self.adaptive_mlp = nn.Identity()
|
1276
1274
|
dim_condition_mult = 1
|
1277
1275
|
|
1278
1276
|
if adaptive_condition_mlp:
|
1279
1277
|
dim_condition_mult = adaptive_condition_mlp_expansion
|
1280
1278
|
|
1281
|
-
self.adaptive_mlp = nn.Sequential(
|
1282
|
-
nn.Linear(dim_condition, dim_condition * dim_condition_mult, bias = False),
|
1283
|
-
nn.SiLU()
|
1284
|
-
)
|
1285
|
-
|
1286
1279
|
if use_scalenorm:
|
1287
1280
|
norm_class = ScaleNorm
|
1288
1281
|
elif use_rmsnorm:
|
@@ -1300,6 +1293,14 @@ class AttentionLayers(Module):
|
|
1300
1293
|
|
1301
1294
|
norm_fn = partial(norm_class, dim)
|
1302
1295
|
|
1296
|
+
self.adaptive_mlp = nn.Identity()
|
1297
|
+
|
1298
|
+
if need_condition and adaptive_condition_mlp:
|
1299
|
+
self.adaptive_mlp = nn.Sequential(
|
1300
|
+
nn.Linear(dim_condition, dim_condition * dim_condition_mult, bias = False),
|
1301
|
+
nn.SiLU()
|
1302
|
+
)
|
1303
|
+
|
1303
1304
|
self.need_condition = need_condition
|
1304
1305
|
self.dim_condition = dim_condition
|
1305
1306
|
|
@@ -4,11 +4,11 @@ x_transformers/autoregressive_wrapper.py,sha256=uX8Mb0zLsQrZECt_9UGt35g7tC05Rk3n
|
|
4
4
|
x_transformers/continuous.py,sha256=WO52n9lFAXv5-SGadi2cApGF8dkouN8QSTEOuC7erj8,6180
|
5
5
|
x_transformers/dpo.py,sha256=LjvWgCkqTl-UuehrzQ8nkX5guLr4whYwsmm7SKSwdls,3450
|
6
6
|
x_transformers/nonautoregressive_wrapper.py,sha256=ys_p8obc7lTeeodCqvkRKxOXQ1C9T3j5Jwr-JbVgnXk,10432
|
7
|
-
x_transformers/x_transformers.py,sha256=
|
7
|
+
x_transformers/x_transformers.py,sha256=kd0H1tsw3SynfQu7xjuzacnuYimVhVMVxLID_I_pM8A,72322
|
8
8
|
x_transformers/xl_autoregressive_wrapper.py,sha256=DCx4n0_c1tFai4nOqaWVnqx2p9eutsZsDMiMP1ckxNU,4117
|
9
9
|
x_transformers/xval.py,sha256=QE1ltYZTR_eGgIHPP2BrMWVWVLqMW-OpDZh87BSmQEg,8563
|
10
|
-
x_transformers-1.30.19.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
|
11
|
-
x_transformers-1.30.
|
12
|
-
x_transformers-1.30.19.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
13
|
-
x_transformers-1.30.19.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
|
14
|
-
x_transformers-1.30.19.dist-info/RECORD,,
|
10
|
+
x_transformers-1.30.20.dist-info/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
|
11
|
+
x_transformers-1.30.20.dist-info/METADATA,sha256=OwTLO7xb31tvGd5vHK1XAU70ireyNSlrcDlg2zUJUuY,662
|
12
|
+
x_transformers-1.30.20.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
13
|
+
x_transformers-1.30.20.dist-info/top_level.txt,sha256=hO6KGpFuGucRNEtRfme4A_rGcM53AKwGP7RVlRIxS5Q,15
|
14
|
+
x_transformers-1.30.20.dist-info/RECORD,,
|
File without changes
|
File without changes
|
File without changes
|