x-transformers 2.1.11__py3-none-any.whl → 2.1.14__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
- x_transformers/belief_state_wrapper.py +8 -4
- x_transformers/x_transformers.py +9 -0
- {x_transformers-2.1.11.dist-info → x_transformers-2.1.14.dist-info}/METADATA +1 -1
- {x_transformers-2.1.11.dist-info → x_transformers-2.1.14.dist-info}/RECORD +6 -6
- {x_transformers-2.1.11.dist-info → x_transformers-2.1.14.dist-info}/WHEEL +0 -0
- {x_transformers-2.1.11.dist-info → x_transformers-2.1.14.dist-info}/licenses/LICENSE +0 -0
x_transformers/belief_state_wrapper.py
CHANGED
Removed lines that the upstream diff view truncates are marked with `…`.
```diff
@@ -132,11 +132,11 @@ class BeliefStateWrapper(Module):
 
         # get the encoded suffix token once
 
-        if …
-            suffix …
+        if exists(suffix):
+            if suffix.ndim == 1:
+                suffix = repeat(suffix, 'n -> b n', b = batch)
 
-
-        suffix = repeat(suffix, 'n -> b n', b = batch)
+            suffix = suffix.flip(1) # reverse autoregressive
 
         suffix_sos_tokens = rearrange(self.suffix_token, 'd -> 1 1 d')
 
@@ -148,6 +148,10 @@ class BeliefStateWrapper(Module):
             return_embeddings = True
         )
 
+        # pick out the last embedding for fill in the model
+
+        suffix_embed = suffix_embed[:, -1:]
+
         # sampling up to seq_len
 
         for _ in range(seq_len):
```
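Taken together, the two hunks make the goal conditioning optional: `suffix` may now be `None`, a bare 1-D suffix is broadcast across the batch, and the suffix is reversed because the backward decoder consumes goal tokens right-to-left; the second hunk then keeps only the last suffix embedding. Below is a minimal sketch of that normalization step outside the wrapper, assuming the library's `exists` helper and `einops.repeat`; the `normalize_suffix` name and tensor values are illustrative, not from the package.

```python
import torch
from einops import repeat

def exists(v):
    return v is not None

def normalize_suffix(suffix, batch):
    # suffix is now optional: None means unconditioned generation
    if not exists(suffix):
        return None

    # broadcast a single 1-D suffix across the whole batch
    if suffix.ndim == 1:
        suffix = repeat(suffix, 'n -> b n', b = batch)

    # the backward decoder reads the goal tokens in reverse order
    return suffix.flip(1)

suffix = torch.tensor([7, 8, 9])
print(normalize_suffix(suffix, batch = 2))
# tensor([[9, 8, 7],
#         [9, 8, 7]])
print(normalize_suffix(None, batch = 2))  # None passes straight through
```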
x_transformers/x_transformers.py
CHANGED
```diff
@@ -2898,6 +2898,15 @@ class TransformerWrapper(Module):
         to_logits_kwargs = dict(),
         **kwargs,
     ):
+
+        # if sequence is None, auto create an empty one if `prepend_embeds` was supplied
+
+        if not exists(x):
+            assert exists(prepend_embeds)
+            x = prepend_embeds.new_empty((prepend_embeds.shape[0], 0), dtype = torch.long)
+
+        # shapes and variables
+
         b, n, device, num_mems, has_memory_tokens, emb_frac_gradient, orig_mask = x.shape[0], x.shape[1], x.device, self.num_memory_tokens, self.num_memory_tokens > 0, self.emb_frac_gradient, mask
 
         return_hiddens = return_mems | return_attn | return_intermediates | return_attn_z_loss
```
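This guard lets `TransformerWrapper.forward` be called with `x = None` when `prepend_embeds` is supplied: an empty `(batch, 0)` sequence of token ids is synthesized so the downstream shape logic runs unchanged. Here is a sketch of the same idiom in isolation, assuming the library's `exists` helper; the `ensure_token_ids` name and the shapes are illustrative.

```python
import torch

def exists(v):
    return v is not None

def ensure_token_ids(x, prepend_embeds):
    if not exists(x):
        assert exists(prepend_embeds), 'need prepend_embeds when x is None'
        # new_empty inherits the device of prepend_embeds; the dtype is
        # overridden to long since token ids are integers
        x = prepend_embeds.new_empty((prepend_embeds.shape[0], 0), dtype = torch.long)
    return x

prepend_embeds = torch.randn(4, 3, 512)  # (batch, prefix_len, dim)
x = ensure_token_ids(None, prepend_embeds)
print(x.shape, x.dtype)  # torch.Size([4, 0]) torch.int64
```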
{x_transformers-2.1.11.dist-info → x_transformers-2.1.14.dist-info}/RECORD
CHANGED
```diff
@@ -1,16 +1,16 @@
 x_transformers/__init__.py,sha256=NDoiBivau559WQ0FvXG4ssU3Il9aoHmTIUFN_1juz0s,911
 x_transformers/attend.py,sha256=-5BWWhFsp7tvZTdN91Ay5SqOjyj9uOs-122vFvoO6b4,17253
 x_transformers/autoregressive_wrapper.py,sha256=reLCno9Z9pchVU79tBF8OMo21LwSZ67KAeB83jqkyAc,10505
-x_transformers/belief_state_wrapper.py,sha256=…
+x_transformers/belief_state_wrapper.py,sha256=qkVg_Nd77G09rjgrPJJkYN7NE58Rxl2DQUR475Zk4vA,8849
 x_transformers/continuous.py,sha256=p0sCAiH1na236ygwgL1Yyhu36eZBf9cZvoW1JyP_fFE,7073
 x_transformers/dpo.py,sha256=xt4OuOWhU8pN3OKN2LZAaC2NC8iiEnchqqcrPWVqf0o,3521
 x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg,9252
 x_transformers/neo_mlp.py,sha256=XCNnnop9WLarcxap1kGuYc1x8GHvwkZiDRnXOxSl3Po,3452
 x_transformers/nonautoregressive_wrapper.py,sha256=2NU58hYMgn-4Jzg3mie-mXb0XH_dCN7fjlzd3K1rLUY,10510
-x_transformers/x_transformers.py,sha256=…
+x_transformers/x_transformers.py,sha256=fqgtIs6__JpLWMnJa8AY5OW3AJ2GR1B5p-9TsWdiOIU,110425
 x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
 x_transformers/xval.py,sha256=7S00kCuab4tWQa-vf-z-XfzADjVj48MoFIr7VSIvttg,8575
-x_transformers-2.1.11.dist-info/METADATA,sha256=…
-x_transformers-2.1.11.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-x_transformers-2.1.11.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
-x_transformers-2.1.11.dist-info/RECORD,,
+x_transformers-2.1.14.dist-info/METADATA,sha256=WIAtwwv4SrEpN239S56A2MVxTFKxo4Csyj4VJBrUee0,87571
+x_transformers-2.1.14.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+x_transformers-2.1.14.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
+x_transformers-2.1.14.dist-info/RECORD,,
```
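Each RECORD row has the form `path,sha256=<digest>,<size>`, where the digest is the urlsafe-base64-encoded SHA-256 of the file with the `=` padding stripped, per the wheel spec. A quick way to check a row against an unpacked wheel is sketched below; the `record_hash` helper and the on-disk path are assumptions for illustration, and the expected digest is copied from the diff above.

```python
import base64
import hashlib
from pathlib import Path

def record_hash(path):
    # urlsafe base64 of the sha256 digest, without '=' padding,
    # matching the encoding used in a wheel's RECORD file
    digest = hashlib.sha256(Path(path).read_bytes()).digest()
    return 'sha256=' + base64.urlsafe_b64encode(digest).rstrip(b'=').decode()

expected = 'sha256=fqgtIs6__JpLWMnJa8AY5OW3AJ2GR1B5p-9TsWdiOIU'
print(record_hash('x_transformers/x_transformers.py') == expected)
```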
{x_transformers-2.1.11.dist-info → x_transformers-2.1.14.dist-info}/WHEEL
File without changes

{x_transformers-2.1.11.dist-info → x_transformers-2.1.14.dist-info}/licenses/LICENSE
File without changes