keras-hub-nightly 0.15.0.dev20240911134614__py3-none-any.whl → 0.16.0.dev2024092017__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- keras_hub/__init__.py +0 -6
- keras_hub/api/__init__.py +1 -0
- keras_hub/api/models/__init__.py +22 -17
- keras_hub/{src/models/llama3/llama3_preprocessor.py → api/utils/__init__.py} +7 -8
- keras_hub/src/api_export.py +15 -9
- keras_hub/src/models/albert/albert_text_classifier.py +6 -1
- keras_hub/src/models/bert/bert_text_classifier.py +6 -1
- keras_hub/src/models/deberta_v3/deberta_v3_text_classifier.py +6 -1
- keras_hub/src/models/densenet/densenet_backbone.py +1 -1
- keras_hub/src/models/distil_bert/distil_bert_text_classifier.py +6 -1
- keras_hub/src/models/f_net/f_net_text_classifier.py +6 -1
- keras_hub/src/models/gemma/gemma_decoder_block.py +1 -1
- keras_hub/src/models/gpt2/gpt2_preprocessor.py +7 -78
- keras_hub/src/models/pali_gemma/pali_gemma_tokenizer.py +1 -1
- keras_hub/src/models/preprocessor.py +1 -5
- keras_hub/src/models/resnet/resnet_backbone.py +3 -16
- keras_hub/src/models/resnet/resnet_image_classifier.py +26 -3
- keras_hub/src/models/resnet/resnet_presets.py +12 -12
- keras_hub/src/models/retinanet/__init__.py +13 -0
- keras_hub/src/models/retinanet/anchor_generator.py +175 -0
- keras_hub/src/models/retinanet/box_matcher.py +259 -0
- keras_hub/src/models/retinanet/non_max_supression.py +578 -0
- keras_hub/src/models/roberta/roberta_text_classifier.py +6 -1
- keras_hub/src/models/task.py +6 -6
- keras_hub/src/models/text_classifier.py +12 -1
- keras_hub/src/models/xlm_roberta/xlm_roberta_text_classifier.py +6 -1
- keras_hub/src/tests/test_case.py +21 -0
- keras_hub/src/tokenizers/byte_pair_tokenizer.py +1 -0
- keras_hub/src/tokenizers/sentence_piece_tokenizer.py +1 -0
- keras_hub/src/tokenizers/word_piece_tokenizer.py +1 -0
- keras_hub/src/utils/imagenet/__init__.py +13 -0
- keras_hub/src/utils/imagenet/imagenet_utils.py +1067 -0
- keras_hub/src/utils/preset_utils.py +24 -33
- keras_hub/src/utils/tensor_utils.py +14 -14
- keras_hub/src/utils/timm/convert_resnet.py +0 -1
- keras_hub/src/utils/timm/preset_loader.py +6 -7
- keras_hub/src/version_utils.py +1 -1
- keras_hub_nightly-0.16.0.dev2024092017.dist-info/METADATA +202 -0
- {keras_hub_nightly-0.15.0.dev20240911134614.dist-info → keras_hub_nightly-0.16.0.dev2024092017.dist-info}/RECORD +41 -45
- {keras_hub_nightly-0.15.0.dev20240911134614.dist-info → keras_hub_nightly-0.16.0.dev2024092017.dist-info}/WHEEL +1 -1
- keras_hub/src/models/bart/bart_preprocessor.py +0 -264
- keras_hub/src/models/bloom/bloom_preprocessor.py +0 -178
- keras_hub/src/models/electra/electra_preprocessor.py +0 -155
- keras_hub/src/models/falcon/falcon_preprocessor.py +0 -180
- keras_hub/src/models/gemma/gemma_preprocessor.py +0 -184
- keras_hub/src/models/gpt_neo_x/gpt_neo_x_preprocessor.py +0 -138
- keras_hub/src/models/llama/llama_preprocessor.py +0 -182
- keras_hub/src/models/mistral/mistral_preprocessor.py +0 -183
- keras_hub/src/models/opt/opt_preprocessor.py +0 -181
- keras_hub/src/models/phi3/phi3_preprocessor.py +0 -183
- keras_hub_nightly-0.15.0.dev20240911134614.dist-info/METADATA +0 -33
- {keras_hub_nightly-0.15.0.dev20240911134614.dist-info → keras_hub_nightly-0.16.0.dev2024092017.dist-info}/top_level.txt +0 -0
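The largest removals above are the ten standalone model preprocessors (from `bart_preprocessor.py` through `phi3_preprocessor.py`), which all shared the tokenize-and-pack pattern shown in the Phi3 diff below. As a minimal sketch of how such a layer was used in 0.15 (the preset name is taken from the docstring in that diff):

```python
import keras_hub

# Sketch of the 0.15-era standalone preprocessor usage; this class is
# removed in 0.16. Preset name comes from the docstring in the diff below.
preprocessor = keras_hub.models.Phi3Preprocessor.from_preset(
    "phi3_mini_4k_instruct_en"
)

# Tokenizes, prepends the start token, and pads to `sequence_length`,
# returning {"token_ids": ..., "padding_mask": ...} for Phi3Backbone.
features = preprocessor("The quick brown fox jumped.")
```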
keras_hub/src/models/phi3/phi3_preprocessor.py
@@ -1,183 +0,0 @@
-# Copyright 2024 The KerasHub Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import keras
-
-from keras_hub.src.api_export import keras_hub_export
-from keras_hub.src.layers.preprocessing.start_end_packer import StartEndPacker
-from keras_hub.src.models.phi3.phi3_backbone import Phi3Backbone
-from keras_hub.src.models.phi3.phi3_tokenizer import Phi3Tokenizer
-from keras_hub.src.models.preprocessor import Preprocessor
-from keras_hub.src.utils.tensor_utils import preprocessing_function
-
-
-@keras_hub_export("keras_hub.models.Phi3Preprocessor")
-class Phi3Preprocessor(Preprocessor):
-    """A Phi3 preprocessing layer which tokenizes and packs inputs.
-
-    This preprocessing layer will do three things:
-
-    1. Tokenize any number of input segments using the `tokenizer`.
-    2. Pack the inputs together using a `keras_hub.layers.StartEndPacker`
-       with the appropriate tokens.
-    3. Construct a dictionary with keys `"token_ids"` and `"padding_mask"`
-       that can be passed directly to `keras_hub.models.Phi3Backbone`.
-
-    This layer can be used directly with `tf.data.Dataset.map` to preprocess
-    string data in the `(x, y, sample_weight)` format used by
-    `keras.Model.fit`.
-
-    Args:
-        tokenizer: A `keras_hub.models.Phi3Tokenizer` instance.
-        sequence_length: The length of the packed inputs.
-        add_start_token: If `True`, the preprocessor will prepend the tokenizer
-            start token to each input sequence. Default is `True`.
-        add_end_token: If `True`, the preprocessor will append the tokenizer
-            end token to each input sequence. Default is `False`.
-
-    Call arguments:
-        x: A tensor of single string sequences, or a tuple of multiple
-            tensor sequences to be packed together. Inputs may be batched or
-            unbatched. For single sequences, raw python inputs will be converted
-            to tensors. For multiple sequences, pass tensors directly.
-        y: Any label data. Will be passed through unaltered.
-        sample_weight: Any label weight data. Will be passed through unaltered.
-        sequence_length: Pass to override the configured `sequence_length` of
-            the layer.
-
-    Examples:
-
-    Directly calling the layer created with `from_preset()`.
-    ```python
-    preprocessor = keras_hub.models.Phi3Preprocessor.from_preset(
-        "phi3_mini_4k_instruct_en"
-    )
-
-    # Tokenize and pack a single sentence.
-    preprocessor("The quick brown fox jumped.")
-
-    # Tokenize a batch of single sentences.
-    preprocessor(["The quick brown fox jumped.", "Call me Ishmael."])
-
-    # Preprocess a batch of sentence pairs.
-    # When handling multiple sequences, always convert to tensors first!
-    first = tf.constant(["The quick brown fox jumped.", "Call me Ishmael."])
-    second = tf.constant(["The fox tripped.", "Oh look, a whale."])
-    preprocessor((first, second))
-    ```
-
-    Mapping with `tf.data.Dataset`.
-    ```python
-    preprocessor = keras_hub.models.Phi3Preprocessor.from_preset(
-        "phi3_mini_4k_instruct_en"
-    )
-    first = tf.constant(["The quick brown fox jumped.", "Call me Ishmael."])
-    second = tf.constant(["The fox tripped.", "Oh look, a whale."])
-    label = tf.constant([1, 1])
-
-    # Map labeled single sentences.
-    ds = tf.data.Dataset.from_tensor_slices((first, label))
-    ds = ds.map(preprocessor, num_parallel_calls=tf.data.AUTOTUNE)
-
-    # Map unlabeled single sentences.
-    ds = tf.data.Dataset.from_tensor_slices(first)
-    ds = ds.map(preprocessor, num_parallel_calls=tf.data.AUTOTUNE)
-
-    # Map labeled sentence pairs.
-    ds = tf.data.Dataset.from_tensor_slices(((first, second), label))
-    ds = ds.map(preprocessor, num_parallel_calls=tf.data.AUTOTUNE)
-
-    # Map unlabeled sentence pairs.
-    ds = tf.data.Dataset.from_tensor_slices((first, second))
-
-    # Watch out for tf.data's default unpacking of tuples here!
-    # Best to invoke the `preprocessor` directly in this case.
-    ds = ds.map(
-        lambda first, second: preprocessor(x=(first, second)),
-        num_parallel_calls=tf.data.AUTOTUNE,
-    )
-    ```
-    """
-
-    backbone_cls = Phi3Backbone
-    tokenizer_cls = Phi3Tokenizer
-
-    def __init__(
-        self,
-        tokenizer,
-        sequence_length=1024,
-        add_start_token=True,
-        add_end_token=False,
-        **kwargs,
-    ):
-        super().__init__(**kwargs)
-        self.tokenizer = tokenizer
-        self.packer = None
-        self.add_start_token = add_start_token
-        self.add_end_token = add_end_token
-        self.sequence_length = sequence_length
-
-    def build(self, input_shape):
-        # Defer packer creation to `build()` so that we can be sure tokenizer
-        # assets have loaded when restoring a saved model.
-        self.packer = StartEndPacker(
-            start_value=self.tokenizer.start_token_id,
-            end_value=self.tokenizer.end_token_id,
-            pad_value=self.tokenizer.pad_token_id,
-            sequence_length=self.sequence_length,
-            return_padding_mask=True,
-        )
-        self.built = True
-
-    def get_config(self):
-        config = super().get_config()
-        config.update(
-            {
-                "sequence_length": self.sequence_length,
-                "add_start_token": self.add_start_token,
-                "add_end_token": self.add_end_token,
-            }
-        )
-        return config
-
-    @preprocessing_function
-    def call(
-        self,
-        x,
-        y=None,
-        sample_weight=None,
-        sequence_length=None,
-    ):
-        sequence_length = sequence_length or self.sequence_length
-        token_ids, padding_mask = self.packer(
-            self.tokenizer(x),
-            sequence_length=sequence_length,
-            add_start_value=self.add_start_token,
-            add_end_value=self.add_end_token,
-        )
-        x = {
-            "token_ids": token_ids,
-            "padding_mask": padding_mask,
-        }
-        return keras.utils.pack_x_y_sample_weight(x, y, sample_weight)
-
-    @property
-    def sequence_length(self):
-        """The padded length of model input sequences."""
-        return self._sequence_length
-
-    @sequence_length.setter
-    def sequence_length(self, value):
-        self._sequence_length = value
-        if self.packer is not None:
-            self.packer.sequence_length = value
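The packing step in the deleted `call()` above used `keras_hub.layers.StartEndPacker`, which remains a public layer. A minimal sketch of that step in isolation, with made-up token ids standing in for the values the layer read from the Phi3 tokenizer:

```python
import keras_hub

# Hypothetical token ids; the deleted layer read these from the tokenizer.
packer = keras_hub.layers.StartEndPacker(
    start_value=1,  # assumed start token id
    end_value=2,    # assumed end token id
    pad_value=0,    # assumed pad token id
    sequence_length=8,
    return_padding_mask=True,
)
# Mirrors the deleted call() defaults: add_start_token=True, add_end_token=False.
token_ids, padding_mask = packer(
    [[5, 6, 7]],  # stand-in for tokenizer output
    add_start_value=True,
    add_end_value=False,
)
# token_ids -> [[1, 5, 6, 7, 0, 0, 0, 0]]; padding_mask flags the real tokens.
```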
keras_hub_nightly-0.15.0.dev20240911134614.dist-info/METADATA
@@ -1,33 +0,0 @@
-Metadata-Version: 2.1
-Name: keras-hub-nightly
-Version: 0.15.0.dev20240911134614
-Summary: 🚧🚧🚧 Work in progress. 🚧🚧🚧 More details soon!
-Home-page: https://github.com/keras-team/keras-hub
-Author: Keras team
-Author-email: keras-hub@google.com
-License: Apache License 2.0
-Classifier: Development Status :: 3 - Alpha
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Operating System :: Unix
-Classifier: Operating System :: Microsoft :: Windows
-Classifier: Operating System :: MacOS
-Classifier: Intended Audience :: Science/Research
-Classifier: Topic :: Scientific/Engineering
-Classifier: Topic :: Software Development
-Requires-Python: >=3.9
-Requires-Dist: absl-py
-Requires-Dist: numpy
-Requires-Dist: packaging
-Requires-Dist: regex
-Requires-Dist: rich
-Requires-Dist: kagglehub
-Requires-Dist: tensorflow-text ; platform_system != "Darwin"
-Provides-Extra: extras
-Requires-Dist: rouge-score ; extra == 'extras'
-Requires-Dist: sentencepiece ; extra == 'extras'
-
-🚧🚧🚧 Work in progress. 🚧🚧🚧 More details soon!
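The same fields shown in the deleted METADATA above can be read off an installed wheel with the standard library alone; a small sketch, assuming keras-hub-nightly is installed in the current environment:

```python
from importlib import metadata

# Read the installed wheel's METADATA; the fields match the file shown
# in the diff above (Name, Version, Requires-Dist, ...).
info = metadata.metadata("keras-hub-nightly")
print(info["Name"], info["Version"])

# List the declared dependencies, including the 'extras' group.
print(metadata.requires("keras-hub-nightly"))
```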