keras-hub-nightly 0.15.0.dev20240823171555__py3-none-any.whl → 0.15.0.dev20240911134614__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- keras_hub/api/__init__.py +1 -0
- keras_hub/api/bounding_box/__init__.py +36 -0
- keras_hub/api/layers/__init__.py +14 -0
- keras_hub/api/models/__init__.py +75 -31
- keras_hub/api/tokenizers/__init__.py +30 -0
- keras_hub/src/bounding_box/__init__.py +13 -0
- keras_hub/src/bounding_box/converters.py +529 -0
- keras_hub/src/bounding_box/formats.py +162 -0
- keras_hub/src/bounding_box/iou.py +263 -0
- keras_hub/src/bounding_box/to_dense.py +95 -0
- keras_hub/src/bounding_box/to_ragged.py +99 -0
- keras_hub/src/bounding_box/utils.py +194 -0
- keras_hub/src/bounding_box/validate_format.py +99 -0
- keras_hub/src/layers/preprocessing/audio_converter.py +121 -0
- keras_hub/src/layers/preprocessing/image_converter.py +130 -0
- keras_hub/src/layers/preprocessing/masked_lm_mask_generator.py +2 -0
- keras_hub/src/layers/preprocessing/multi_segment_packer.py +9 -8
- keras_hub/src/layers/preprocessing/preprocessing_layer.py +2 -29
- keras_hub/src/layers/preprocessing/random_deletion.py +33 -31
- keras_hub/src/layers/preprocessing/random_swap.py +33 -31
- keras_hub/src/layers/preprocessing/resizing_image_converter.py +101 -0
- keras_hub/src/layers/preprocessing/start_end_packer.py +3 -2
- keras_hub/src/models/albert/__init__.py +1 -2
- keras_hub/src/models/albert/albert_masked_lm_preprocessor.py +6 -86
- keras_hub/src/models/albert/{albert_classifier.py → albert_text_classifier.py} +29 -10
- keras_hub/src/models/albert/{albert_preprocessor.py → albert_text_classifier_preprocessor.py} +14 -70
- keras_hub/src/models/albert/albert_tokenizer.py +17 -36
- keras_hub/src/models/backbone.py +12 -34
- keras_hub/src/models/bart/__init__.py +1 -2
- keras_hub/src/models/bart/bart_preprocessor.py +6 -18
- keras_hub/src/models/bart/bart_seq_2_seq_lm_preprocessor.py +21 -148
- keras_hub/src/models/bart/bart_tokenizer.py +12 -39
- keras_hub/src/models/bert/__init__.py +1 -5
- keras_hub/src/models/bert/bert_masked_lm_preprocessor.py +6 -87
- keras_hub/src/models/bert/bert_presets.py +1 -4
- keras_hub/src/models/bert/{bert_classifier.py → bert_text_classifier.py} +12 -10
- keras_hub/src/models/bert/{bert_preprocessor.py → bert_text_classifier_preprocessor.py} +14 -70
- keras_hub/src/models/bert/bert_tokenizer.py +17 -35
- keras_hub/src/models/bloom/__init__.py +1 -2
- keras_hub/src/models/bloom/bloom_causal_lm_preprocessor.py +6 -91
- keras_hub/src/models/bloom/bloom_preprocessor.py +5 -12
- keras_hub/src/models/bloom/bloom_tokenizer.py +12 -41
- keras_hub/src/models/causal_lm.py +10 -29
- keras_hub/src/models/causal_lm_preprocessor.py +195 -0
- keras_hub/src/models/csp_darknet/csp_darknet_backbone.py +54 -15
- keras_hub/src/models/deberta_v3/__init__.py +1 -4
- keras_hub/src/models/deberta_v3/deberta_v3_masked_lm_preprocessor.py +14 -77
- keras_hub/src/models/deberta_v3/{deberta_v3_classifier.py → deberta_v3_text_classifier.py} +11 -11
- keras_hub/src/models/deberta_v3/{deberta_v3_preprocessor.py → deberta_v3_text_classifier_preprocessor.py} +23 -64
- keras_hub/src/models/deberta_v3/deberta_v3_tokenizer.py +30 -25
- keras_hub/src/models/densenet/densenet_backbone.py +46 -22
- keras_hub/src/models/distil_bert/__init__.py +1 -4
- keras_hub/src/models/distil_bert/distil_bert_masked_lm_preprocessor.py +14 -76
- keras_hub/src/models/distil_bert/{distil_bert_classifier.py → distil_bert_text_classifier.py} +12 -12
- keras_hub/src/models/distil_bert/{distil_bert_preprocessor.py → distil_bert_text_classifier_preprocessor.py} +23 -63
- keras_hub/src/models/distil_bert/distil_bert_tokenizer.py +19 -35
- keras_hub/src/models/efficientnet/__init__.py +13 -0
- keras_hub/src/models/efficientnet/efficientnet_backbone.py +569 -0
- keras_hub/src/models/efficientnet/fusedmbconv.py +229 -0
- keras_hub/src/models/efficientnet/mbconv.py +238 -0
- keras_hub/src/models/electra/__init__.py +1 -2
- keras_hub/src/models/electra/electra_preprocessor.py +6 -5
- keras_hub/src/models/electra/electra_tokenizer.py +17 -32
- keras_hub/src/models/f_net/__init__.py +1 -2
- keras_hub/src/models/f_net/f_net_masked_lm_preprocessor.py +12 -78
- keras_hub/src/models/f_net/{f_net_classifier.py → f_net_text_classifier.py} +10 -8
- keras_hub/src/models/f_net/{f_net_preprocessor.py → f_net_text_classifier_preprocessor.py} +19 -63
- keras_hub/src/models/f_net/f_net_tokenizer.py +17 -35
- keras_hub/src/models/falcon/__init__.py +1 -2
- keras_hub/src/models/falcon/falcon_causal_lm_preprocessor.py +6 -89
- keras_hub/src/models/falcon/falcon_preprocessor.py +5 -12
- keras_hub/src/models/falcon/falcon_tokenizer.py +12 -35
- keras_hub/src/models/gemma/__init__.py +1 -2
- keras_hub/src/models/gemma/gemma_causal_lm_preprocessor.py +6 -90
- keras_hub/src/models/gemma/gemma_preprocessor.py +5 -12
- keras_hub/src/models/gemma/gemma_tokenizer.py +12 -23
- keras_hub/src/models/gpt2/__init__.py +1 -2
- keras_hub/src/models/gpt2/gpt2_causal_lm_preprocessor.py +6 -89
- keras_hub/src/models/gpt2/gpt2_preprocessor.py +5 -12
- keras_hub/src/models/gpt2/gpt2_tokenizer.py +12 -34
- keras_hub/src/models/gpt_neo_x/gpt_neo_x_causal_lm_preprocessor.py +6 -91
- keras_hub/src/models/gpt_neo_x/gpt_neo_x_preprocessor.py +5 -12
- keras_hub/src/models/gpt_neo_x/gpt_neo_x_tokenizer.py +12 -34
- keras_hub/src/models/image_classifier.py +0 -5
- keras_hub/src/models/image_classifier_preprocessor.py +83 -0
- keras_hub/src/models/llama/__init__.py +1 -2
- keras_hub/src/models/llama/llama_causal_lm_preprocessor.py +6 -85
- keras_hub/src/models/llama/llama_preprocessor.py +5 -12
- keras_hub/src/models/llama/llama_tokenizer.py +12 -25
- keras_hub/src/models/llama3/__init__.py +1 -2
- keras_hub/src/models/llama3/llama3_causal_lm_preprocessor.py +6 -89
- keras_hub/src/models/llama3/llama3_preprocessor.py +2 -0
- keras_hub/src/models/llama3/llama3_tokenizer.py +12 -33
- keras_hub/src/models/masked_lm.py +0 -2
- keras_hub/src/models/masked_lm_preprocessor.py +156 -0
- keras_hub/src/models/mistral/__init__.py +1 -2
- keras_hub/src/models/mistral/mistral_causal_lm_preprocessor.py +6 -91
- keras_hub/src/models/mistral/mistral_preprocessor.py +5 -12
- keras_hub/src/models/mistral/mistral_tokenizer.py +12 -23
- keras_hub/src/models/mix_transformer/mix_transformer_backbone.py +2 -2
- keras_hub/src/models/mobilenet/__init__.py +13 -0
- keras_hub/src/models/mobilenet/mobilenet_backbone.py +530 -0
- keras_hub/src/models/mobilenet/mobilenet_image_classifier.py +114 -0
- keras_hub/src/models/opt/__init__.py +1 -2
- keras_hub/src/models/opt/opt_causal_lm_preprocessor.py +6 -93
- keras_hub/src/models/opt/opt_preprocessor.py +5 -12
- keras_hub/src/models/opt/opt_tokenizer.py +12 -41
- keras_hub/src/models/pali_gemma/__init__.py +1 -4
- keras_hub/src/models/pali_gemma/pali_gemma_causal_lm_preprocessor.py +28 -28
- keras_hub/src/models/pali_gemma/pali_gemma_image_converter.py +25 -0
- keras_hub/src/models/pali_gemma/pali_gemma_presets.py +5 -5
- keras_hub/src/models/pali_gemma/pali_gemma_tokenizer.py +10 -2
- keras_hub/src/models/phi3/__init__.py +1 -2
- keras_hub/src/models/phi3/phi3_causal_lm.py +3 -9
- keras_hub/src/models/phi3/phi3_causal_lm_preprocessor.py +6 -89
- keras_hub/src/models/phi3/phi3_preprocessor.py +5 -12
- keras_hub/src/models/phi3/phi3_tokenizer.py +12 -36
- keras_hub/src/models/preprocessor.py +76 -83
- keras_hub/src/models/resnet/__init__.py +6 -0
- keras_hub/src/models/resnet/resnet_backbone.py +387 -26
- keras_hub/src/models/resnet/resnet_image_classifier.py +7 -3
- keras_hub/src/models/resnet/resnet_image_classifier_preprocessor.py +28 -0
- keras_hub/src/models/resnet/resnet_image_converter.py +23 -0
- keras_hub/src/models/resnet/resnet_presets.py +95 -0
- keras_hub/src/models/roberta/__init__.py +1 -2
- keras_hub/src/models/roberta/roberta_masked_lm_preprocessor.py +22 -74
- keras_hub/src/models/roberta/{roberta_classifier.py → roberta_text_classifier.py} +11 -11
- keras_hub/src/models/roberta/{roberta_preprocessor.py → roberta_text_classifier_preprocessor.py} +21 -53
- keras_hub/src/models/roberta/roberta_tokenizer.py +13 -52
- keras_hub/src/models/seq_2_seq_lm_preprocessor.py +269 -0
- keras_hub/src/models/stable_diffusion_v3/__init__.py +13 -0
- keras_hub/src/models/stable_diffusion_v3/clip_encoder_block.py +103 -0
- keras_hub/src/models/stable_diffusion_v3/clip_preprocessor.py +93 -0
- keras_hub/src/models/stable_diffusion_v3/clip_text_encoder.py +149 -0
- keras_hub/src/models/stable_diffusion_v3/clip_tokenizer.py +167 -0
- keras_hub/src/models/stable_diffusion_v3/mmdit.py +427 -0
- keras_hub/src/models/stable_diffusion_v3/mmdit_block.py +317 -0
- keras_hub/src/models/stable_diffusion_v3/t5_xxl_preprocessor.py +74 -0
- keras_hub/src/models/stable_diffusion_v3/t5_xxl_text_encoder.py +155 -0
- keras_hub/src/models/stable_diffusion_v3/vae_attention.py +126 -0
- keras_hub/src/models/stable_diffusion_v3/vae_image_decoder.py +186 -0
- keras_hub/src/models/t5/__init__.py +1 -2
- keras_hub/src/models/t5/t5_tokenizer.py +13 -23
- keras_hub/src/models/task.py +71 -116
- keras_hub/src/models/{classifier.py → text_classifier.py} +8 -13
- keras_hub/src/models/text_classifier_preprocessor.py +138 -0
- keras_hub/src/models/whisper/__init__.py +1 -2
- keras_hub/src/models/whisper/{whisper_audio_feature_extractor.py → whisper_audio_converter.py} +20 -18
- keras_hub/src/models/whisper/whisper_backbone.py +0 -3
- keras_hub/src/models/whisper/whisper_presets.py +10 -10
- keras_hub/src/models/whisper/whisper_tokenizer.py +20 -16
- keras_hub/src/models/xlm_roberta/__init__.py +1 -4
- keras_hub/src/models/xlm_roberta/xlm_roberta_masked_lm_preprocessor.py +26 -72
- keras_hub/src/models/xlm_roberta/{xlm_roberta_classifier.py → xlm_roberta_text_classifier.py} +11 -11
- keras_hub/src/models/xlm_roberta/{xlm_roberta_preprocessor.py → xlm_roberta_text_classifier_preprocessor.py} +26 -53
- keras_hub/src/models/xlm_roberta/xlm_roberta_tokenizer.py +25 -10
- keras_hub/src/tests/test_case.py +25 -0
- keras_hub/src/tokenizers/byte_pair_tokenizer.py +29 -17
- keras_hub/src/tokenizers/byte_tokenizer.py +14 -15
- keras_hub/src/tokenizers/sentence_piece_tokenizer.py +19 -7
- keras_hub/src/tokenizers/tokenizer.py +67 -32
- keras_hub/src/tokenizers/unicode_codepoint_tokenizer.py +14 -15
- keras_hub/src/tokenizers/word_piece_tokenizer.py +33 -47
- keras_hub/src/utils/keras_utils.py +0 -50
- keras_hub/src/utils/preset_utils.py +238 -67
- keras_hub/src/utils/tensor_utils.py +187 -69
- keras_hub/src/utils/timm/convert_resnet.py +20 -16
- keras_hub/src/utils/timm/preset_loader.py +67 -0
- keras_hub/src/utils/transformers/convert_albert.py +193 -0
- keras_hub/src/utils/transformers/convert_bart.py +373 -0
- keras_hub/src/utils/transformers/convert_bert.py +7 -17
- keras_hub/src/utils/transformers/convert_distilbert.py +10 -20
- keras_hub/src/utils/transformers/convert_gemma.py +5 -19
- keras_hub/src/utils/transformers/convert_gpt2.py +5 -18
- keras_hub/src/utils/transformers/convert_llama3.py +7 -18
- keras_hub/src/utils/transformers/convert_mistral.py +129 -0
- keras_hub/src/utils/transformers/convert_pali_gemma.py +7 -29
- keras_hub/src/utils/transformers/preset_loader.py +77 -0
- keras_hub/src/utils/transformers/safetensor_utils.py +2 -2
- keras_hub/src/version_utils.py +1 -1
- {keras_hub_nightly-0.15.0.dev20240823171555.dist-info → keras_hub_nightly-0.15.0.dev20240911134614.dist-info}/METADATA +1 -2
- keras_hub_nightly-0.15.0.dev20240911134614.dist-info/RECORD +338 -0
- {keras_hub_nightly-0.15.0.dev20240823171555.dist-info → keras_hub_nightly-0.15.0.dev20240911134614.dist-info}/WHEEL +1 -1
- keras_hub/src/models/whisper/whisper_preprocessor.py +0 -326
- keras_hub/src/utils/timm/convert.py +0 -37
- keras_hub/src/utils/transformers/convert.py +0 -101
- keras_hub_nightly-0.15.0.dev20240823171555.dist-info/RECORD +0 -297
- {keras_hub_nightly-0.15.0.dev20240823171555.dist-info → keras_hub_nightly-0.15.0.dev20240911134614.dist-info}/top_level.txt +0 -0
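The most sweeping change in the file list above is the task rename from `*Classifier` to `*TextClassifier` (and `*Preprocessor` to `*TextClassifierPreprocessor`), visible in the `albert`, `bert`, `deberta_v3`, `distil_bert`, `f_net`, `roberta`, and `xlm_roberta` entries, alongside new shared base classes (`causal_lm_preprocessor.py`, `masked_lm_preprocessor.py`, `seq_2_seq_lm_preprocessor.py`, `text_classifier_preprocessor.py`). A minimal sketch of the renamed task API, assuming the `distil_bert_base_en_uncased` preset used in the docstrings of the expanded diff below and the usual `num_classes` argument; the old names stay exported as aliases, at least for the DistilBERT classes shown in that diff:

```python
import keras_hub

# Renamed task class (formerly DistilBertClassifier); preset name taken from
# the docstrings in the diff below.
classifier = keras_hub.models.DistilBertTextClassifier.from_preset(
    "distil_bert_base_en_uncased",
    num_classes=2,
)
classifier.predict(["The quick brown fox jumped.", "Call me Ishmael."])
```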
keras_hub/src/models/distil_bert/{distil_bert_preprocessor.py → distil_bert_text_classifier_preprocessor.py}

@@ -16,20 +16,25 @@
 import keras
 
 from keras_hub.src.api_export import keras_hub_export
-from keras_hub.src.
-
+from keras_hub.src.models.distil_bert.distil_bert_backbone import (
+    DistilBertBackbone,
 )
 from keras_hub.src.models.distil_bert.distil_bert_tokenizer import (
     DistilBertTokenizer,
 )
-from keras_hub.src.models.
-
-    convert_inputs_to_list_of_tensor_segments,
+from keras_hub.src.models.text_classifier_preprocessor import (
+    TextClassifierPreprocessor,
 )
+from keras_hub.src.utils.tensor_utils import preprocessing_function
 
 
-@keras_hub_export(
-
+@keras_hub_export(
+    [
+        "keras_hub.models.DistilBertTextClassifierPreprocessor",
+        "keras_hub.models.DistilBertPreprocessor",
+    ]
+)
 class DistilBertTextClassifierPreprocessor(TextClassifierPreprocessor):
     """A DistilBERT preprocessing layer which tokenizes and packs inputs.
 
     This preprocessing layer will do three things:

@@ -70,7 +75,7 @@ class DistilBertPreprocessor(Preprocessor):
 
     Directly calling the layer on data.
     ```python
-    preprocessor = keras_hub.models.
+    preprocessor = keras_hub.models.TextClassifierPreprocessor.from_preset(
         "distil_bert_base_en_uncased"
     )
     preprocessor(["The quick brown fox jumped.", "Call me Ishmael."])

@@ -79,13 +84,15 @@ class DistilBertPreprocessor(Preprocessor):
     vocab = ["[UNK]", "[CLS]", "[SEP]", "[PAD]", "[MASK]"]
     vocab += ["The", "quick", "brown", "fox", "jumped", "."]
     tokenizer = keras_hub.models.DistilBertTokenizer(vocabulary=vocab)
-    preprocessor = keras_hub.models.
+    preprocessor = keras_hub.models.DistilBertTextClassifierPreprocessor(
+        tokenizer
+    )
     preprocessor("The quick brown fox jumped.")
     ```
 
     Mapping with `tf.data.Dataset`.
     ```python
-    preprocessor = keras_hub.models.
+    preprocessor = keras_hub.models.TextClassifierPreprocessor.from_preset(
         "distil_bert_base_en_uncased"
     )
 

@@ -116,60 +123,13 @@ class DistilBertPreprocessor(Preprocessor):
     ```
     """
 
+    backbone_cls = DistilBertBackbone
     tokenizer_cls = DistilBertTokenizer
 
-
-        self,
-        tokenizer,
-        sequence_length=512,
-        truncate="round_robin",
-        **kwargs,
-    ):
-        super().__init__(**kwargs)
-        self.tokenizer = tokenizer
-        self.packer = None
-        self.sequence_length = sequence_length
-        self.truncate = truncate
-
-    def build(self, input_shape):
-        super().build(input_shape)
-        # Defer masker creation to `build()` so that we can be sure tokenizer
-        # assets have loaded when restoring a saved model.
-        self.packer = MultiSegmentPacker(
-            start_value=self.tokenizer.cls_token_id,
-            end_value=self.tokenizer.sep_token_id,
-            pad_value=self.tokenizer.pad_token_id,
-            truncate=self.truncate,
-            sequence_length=self.sequence_length,
-        )
-
+    @preprocessing_function
     def call(self, x, y=None, sample_weight=None):
-
-        x =
-
-        x
-            "token_ids": token_ids,
-            "padding_mask": token_ids != self.tokenizer.pad_token_id,
-        }
+        output = super().call(x, y=y, sample_weight=sample_weight)
+        x, y, sample_weight = keras.utils.unpack_x_y_sample_weight(output)
+        # Backbone has no segment ID input.
+        del x["segment_ids"]
         return keras.utils.pack_x_y_sample_weight(x, y, sample_weight)
-
-    def get_config(self):
-        config = super().get_config()
-        config.update(
-            {
-                "sequence_length": self.sequence_length,
-                "truncate": self.truncate,
-            }
-        )
-        return config
-
-    @property
-    def sequence_length(self):
-        """The padded length of model input sequences."""
-        return self._sequence_length
-
-    @sequence_length.setter
-    def sequence_length(self, value):
-        self._sequence_length = value
-        if self.packer is not None:
-            self.packer.sequence_length = value
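In the rewritten preprocessor above, the per-model `__init__`, `build`, `get_config`, and `sequence_length` boilerplate is gone: the class inherits packing from `TextClassifierPreprocessor`, wraps `call()` in `@preprocessing_function`, and only strips `"segment_ids"` because the DistilBERT backbone has no segment ID input. A minimal sketch, assuming the toy vocabulary from the docstring above (not a real DistilBERT vocabulary) and the base class's `sequence_length` argument:

```python
import keras_hub

# Toy vocabulary copied from the docstring in the diff above.
vocab = ["[UNK]", "[CLS]", "[SEP]", "[PAD]", "[MASK]"]
vocab += ["The", "quick", "brown", "fox", "jumped", "."]
tokenizer = keras_hub.models.DistilBertTokenizer(vocabulary=vocab)
preprocessor = keras_hub.models.DistilBertTextClassifierPreprocessor(
    tokenizer,
    sequence_length=10,
)

x = preprocessor("The quick brown fox jumped.")
# Expect "token_ids" and "padding_mask" but no "segment_ids", per the new call().
print(sorted(x.keys()))
```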
keras_hub/src/models/distil_bert/distil_bert_tokenizer.py

@@ -14,10 +14,18 @@
 
 
 from keras_hub.src.api_export import keras_hub_export
+from keras_hub.src.models.distil_bert.distil_bert_backbone import (
+    DistilBertBackbone,
+)
 from keras_hub.src.tokenizers.word_piece_tokenizer import WordPieceTokenizer
 
 
-@keras_hub_export(
+@keras_hub_export(
+    [
+        "keras_hub.tokenizers.DistilBertTokenizer",
+        "keras_hub.models.DistilBertTokenizer",
+    ]
+)
 class DistilBertTokenizer(WordPieceTokenizer):
     """A DistilBERT tokenizer using WordPiece subword segmentation.
 

@@ -27,9 +35,6 @@ class DistilBertTokenizer(WordPieceTokenizer):
     models and provides a `from_preset()` method to automatically download
     a matching vocabulary for a DistilBERT preset.
 
-    This tokenizer does not provide truncation or padding of inputs. It can be
-    combined with a `keras_hub.models.DistilBertPreprocessor` layer for input packing.
-
     If input is a batch of strings (rank > 0), the layer will output a
     `tf.RaggedTensor` where the last dimension of the output is ragged.
 

@@ -70,45 +75,24 @@ class DistilBertTokenizer(WordPieceTokenizer):
     ```
     """
 
+    backbone_cls = DistilBertBackbone
+
     def __init__(
         self,
         vocabulary,
         lowercase=False,
-        special_tokens_in_strings=False,
         **kwargs,
     ):
-        self.
-        self.
-        self.
-        self.
+        self._add_special_token("[CLS]", "cls_token")
+        self._add_special_token("[SEP]", "sep_token")
+        self._add_special_token("[PAD]", "pad_token")
+        self._add_special_token("[MASK]", "mask_token")
+        # Also add `tokenizer.start_token` and `tokenizer.end_token` for
+        # compatibility with other tokenizers.
+        self._add_special_token("[CLS]", "start_token")
+        self._add_special_token("[SEP]", "end_token")
         super().__init__(
             vocabulary=vocabulary,
             lowercase=lowercase,
-            special_tokens=[
-                self.cls_token,
-                self.sep_token,
-                self.pad_token,
-                self.mask_token,
-            ],
-            special_tokens_in_strings=special_tokens_in_strings,
             **kwargs,
         )
-
-    def set_vocabulary(self, vocabulary):
-        super().set_vocabulary(vocabulary)
-
-        if vocabulary is not None:
-            self.cls_token_id = self.token_to_id(self.cls_token)
-            self.sep_token_id = self.token_to_id(self.sep_token)
-            self.pad_token_id = self.token_to_id(self.pad_token)
-            self.mask_token_id = self.token_to_id(self.mask_token)
-        else:
-            self.cls_token_id = None
-            self.sep_token_id = None
-            self.pad_token_id = None
-            self.mask_token_id = None
-
-    def get_config(self):
-        config = super().get_config()
-        del config["special_tokens"]  # Not configurable; set in __init__.
-        return config
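The tokenizer diff above replaces the hand-rolled special-token bookkeeping (`set_vocabulary`, the `*_token_id` attributes, and the `get_config` patching) with `_add_special_token()` calls and a `backbone_cls` pointer, and exports the class under both `keras_hub.tokenizers` and `keras_hub.models`. A minimal sketch, assuming `_add_special_token()` registers both the token string and its id, and that the generic `start_token`/`end_token` aliases resolve to `[CLS]`/`[SEP]` as the comments in the diff indicate:

```python
import keras_hub

vocab = ["[UNK]", "[CLS]", "[SEP]", "[PAD]", "[MASK]", "the", "quick", "brown"]
# Both import paths are exported per the diff above.
tokenizer = keras_hub.tokenizers.DistilBertTokenizer(vocabulary=vocab)

print(tokenizer.cls_token, tokenizer.cls_token_id)  # "[CLS]" 1
# start_token/end_token alias [CLS]/[SEP] (assumed id lookup behavior).
print(tokenizer.start_token_id == tokenizer.cls_token_id)  # True
```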
New file (13 lines, Apache 2.0 license header only):

@@ -0,0 +1,13 @@
+# Copyright 2024 The KerasHub Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.