keras-hub-nightly 0.21.0.dev202504210405__py3-none-any.whl → 0.21.0.dev202504250023__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- keras_hub/__init__.py +15 -33
- keras_hub/layers/__init__.py +131 -0
- keras_hub/metrics/__init__.py +11 -0
- keras_hub/models/__init__.py +609 -0
- keras_hub/samplers/__init__.py +18 -0
- keras_hub/src/tests/mocks/mock_gemma3_tokenizer.py +155 -0
- keras_hub/src/utils/coco/__init__.py +0 -0
- keras_hub/src/utils/coco/coco_utils.py +133 -0
- keras_hub/src/utils/imagenet/imagenet_utils.py +36 -0
- keras_hub/src/utils/preset_utils.py +1 -1
- keras_hub/src/{version_utils.py → version.py} +1 -1
- keras_hub/tokenizers/__init__.py +108 -0
- keras_hub/utils/__init__.py +21 -0
- {keras_hub_nightly-0.21.0.dev202504210405.dist-info → keras_hub_nightly-0.21.0.dev202504250023.dist-info}/METADATA +6 -20
- {keras_hub_nightly-0.21.0.dev202504210405.dist-info → keras_hub_nightly-0.21.0.dev202504250023.dist-info}/RECORD +17 -15
- {keras_hub_nightly-0.21.0.dev202504210405.dist-info → keras_hub_nightly-0.21.0.dev202504250023.dist-info}/WHEEL +1 -1
- keras_hub/api/__init__.py +0 -15
- keras_hub/api/layers/__init__.py +0 -89
- keras_hub/api/metrics/__init__.py +0 -11
- keras_hub/api/models/__init__.py +0 -441
- keras_hub/api/samplers/__init__.py +0 -16
- keras_hub/api/tokenizers/__init__.py +0 -62
- keras_hub/api/utils/__init__.py +0 -9
- {keras_hub_nightly-0.21.0.dev202504210405.dist-info → keras_hub_nightly-0.21.0.dev202504250023.dist-info}/top_level.txt +0 -0
keras_hub/src/tests/mocks/mock_gemma3_tokenizer.py ADDED
@@ -0,0 +1,155 @@
+import tensorflow as tf
+
+from keras_hub.src.tokenizers.tokenizer import Tokenizer
+from keras_hub.src.utils.tensor_utils import convert_to_ragged_batch
+from keras_hub.src.utils.tensor_utils import is_int_dtype
+from keras_hub.src.utils.tensor_utils import is_string_dtype
+from keras_hub.src.utils.tensor_utils import preprocessing_function
+
+
+class MockGemma3Tokenizer(Tokenizer):
+    def __init__(
+        self,
+        proto=None,
+        sequence_length=None,
+        dtype="int32",
+        add_bos=False,
+        add_eos=False,
+        **kwargs,
+    ):
+        if not is_int_dtype(dtype) and not is_string_dtype(dtype):
+            raise ValueError(
+                "Output dtype must be an integer type or a string. "
+                f"Received: dtype={dtype}"
+            )
+
+        super().__init__(dtype=dtype, **kwargs)
+
+        self.vocabulary = [
+            "<pad>",
+            "<bos>",
+            "<eos>",
+            "<unk>",
+            "<start_of_image>",
+            "<end_of_image>",
+            "<start_of_turn>",
+            "<end_of_turn>",
+            "<img>",
+            "the",
+            "brown",
+            "earth",
+            "fox",
+            "is",
+            "quick",
+            "round",
+            "\n\n",
+        ]
+        self.string_to_id = tf.lookup.StaticHashTable(
+            tf.lookup.KeyValueTensorInitializer(
+                self.vocabulary, list(range(len(self.vocabulary)))
+            ),
+            default_value=3,
+        )
+        self.id_to_string = tf.lookup.StaticHashTable(
+            tf.lookup.KeyValueTensorInitializer(
+                list(range(len(self.vocabulary))), self.vocabulary
+            ),
+            default_value="<unk>",
+        )
+
+        # The usual tokens.
+        self._add_special_token("<bos>", "start_token")
+        self._add_special_token("<eos>", "end_token")
+        self._add_special_token("<pad>", "pad_token")
+
+        # Image placeholder token.
+        self._add_special_token("<img>", "image_placeholder")
+
+        # Some tokens which are used in the preprocessor. We need to keep them
+        # here so that the preprocessor works with `tf.data`.
+        self._add_special_token("<start_of_image>", "start_of_image_token")
+        self._add_special_token("<end_of_image>", "end_of_image_token")
+
+        # self.special_token_ids = [
+        #     0, 1, 2, 4, 5, 8
+        # ]
+
+        self.sequence_length = sequence_length
+        self.add_bos = add_bos
+        self.add_eos = add_eos
+
+    def vocabulary_size(self):
+        return len(self.vocabulary)
+
+    def get_vocabulary(self):
+        return self.vocabulary
+
+    def id_to_token(self, id):
+        return self.vocabulary[id]
+
+    def token_to_id(self, token):
+        return self.vocabulary.index(token)
+
+    @preprocessing_function
+    def tokenize(self, inputs):
+        inputs = tf.convert_to_tensor(inputs)
+        unbatched = inputs.shape.rank == 0
+        if unbatched:
+            inputs = tf.expand_dims(inputs, 0)
+
+        inputs = tf.strings.regex_replace(
+            inputs, self.start_of_image_token, f" {self.start_of_image_token} "
+        )
+        inputs = tf.strings.regex_replace(
+            inputs, self.end_of_image_token, f" {self.end_of_image_token} "
+        )
+        inputs = tf.strings.regex_replace(
+            inputs, self.image_placeholder, f" {self.image_placeholder} "
+        )
+        inputs = tf.strings.regex_replace(inputs, "  ", " ")
+
+        sep_inputs = tf.strings.split(inputs, sep=" ")
+        tokens = self.string_to_id.lookup(sep_inputs)
+
+        if self.add_bos:
+            bos_tensor = tf.fill(
+                value=self.start_token_id,
+                dims=tokens.shape.as_list()[0:1] + [1],
+            )
+            tokens = tf.concat((bos_tensor, tokens), axis=-1)
+        if self.add_eos:
+            eos_tensor = tf.fill(
+                value=self.end_token_id, dims=tokens.shape.as_list()[0:1] + [1]
+            )
+            tokens = tf.concat((tokens, eos_tensor), axis=-1)
+
+        # Convert to a dense output if input was a scalar.
+        if unbatched:
+            tokens = tf.squeeze(tokens, 0)
+
+        return tokens
+
+    @preprocessing_function
+    def detokenize(self, inputs):
+        inputs, unbatched, rectangular = convert_to_ragged_batch(inputs)
+        # tf-text sentencepiece does not handle int64.
+        inputs = tf.cast(inputs, "int32")
+
+        outputs = self.id_to_string.lookup(inputs)
+        outputs = tf.strings.reduce_join(outputs, axis=-1, separator=" ")
+
+        for token in [
+            self.start_token,
+            self.end_token,
+            self.pad_token,
+        ]:
+            outputs = tf.strings.regex_replace(outputs, token, "")
+
+        outputs = tf.strings.strip(outputs)
+
+        if unbatched:
+            outputs = tf.squeeze(outputs, 0)
+        return outputs
+
+    def __call__(self, inputs):
+        return self.tokenize(inputs)
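Note: a minimal usage sketch for the mock above. Token IDs follow the vocabulary order in `__init__` (`<bos>`=1, `<eos>`=2, `"the"`=9, and so on), and both methods return tensors rather than Python lists because of the `@preprocessing_function` wrapper; the values shown are inferred from that table, not output from a run.

    tokenizer = MockGemma3Tokenizer(add_bos=True, add_eos=True)

    # Splits on spaces and maps through the lookup table:
    # <bos>=1, the=9, quick=14, brown=10, fox=12, <eos>=2.
    tokens = tokenizer.tokenize("the quick brown fox")  # -> tensor of [1, 9, 14, 10, 12, 2]

    # Joins with spaces, then strips <bos>/<eos>/<pad> and whitespace.
    text = tokenizer.detokenize(tokens)  # -> "the quick brown fox"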
keras_hub/src/utils/coco/__init__.py ADDED
File without changes (new empty module).
keras_hub/src/utils/coco/coco_utils.py ADDED
@@ -0,0 +1,133 @@
+from keras_hub.src.api_export import keras_hub_export
+
+
+@keras_hub_export("keras_hub.utils.coco_id_to_name")
+def coco_id_to_name(id):
+    """Convert a single COCO class ID to a class name.
+
+    Args:
+        id: An integer class id from 0 to 91.
+
+    Returns:
+        The human readable image class name, e.g. "bicycle".
+
+    Example:
+    >>> keras_hub.utils.coco_id_to_name(2)
+    'bicycle'
+    """
+    return COCO_NAMES[id]
+
+
+@keras_hub_export("keras_hub.utils.coco_name_to_id")
+def coco_name_to_id(name):
+    """Convert a single COCO class name to a class ID.
+
+    Args:
+        name: A human readable image class name, e.g. "bicycle".
+
+    Returns:
+        The integer class id from 0 to 91.
+
+    Example:
+    >>> keras_hub.utils.coco_name_to_id("bicycle")
+    2
+    """
+    return COCO_IDS[name]
+
+
+COCO_NAMES = {
+    0: "unlabeled",
+    1: "person",
+    2: "bicycle",
+    3: "car",
+    4: "motorcycle",
+    5: "airplane",
+    6: "bus",
+    7: "train",
+    8: "truck",
+    9: "boat",
+    10: "traffic_light",
+    11: "fire_hydrant",
+    12: "street_sign",
+    13: "stop_sign",
+    14: "parking_meter",
+    15: "bench",
+    16: "bird",
+    17: "cat",
+    18: "dog",
+    19: "horse",
+    20: "sheep",
+    21: "cow",
+    22: "elephant",
+    23: "bear",
+    24: "zebra",
+    25: "giraffe",
+    26: "hat",
+    27: "backpack",
+    28: "umbrella",
+    29: "shoe",
+    30: "eye_glasses",
+    31: "handbag",
+    32: "tie",
+    33: "suitcase",
+    34: "frisbee",
+    35: "skis",
+    36: "snowboard",
+    37: "sports_ball",
+    38: "kite",
+    39: "baseball_bat",
+    40: "baseball_glove",
+    41: "skateboard",
+    42: "surfboard",
+    43: "tennis_racket",
+    44: "bottle",
+    45: "plate",
+    46: "wine_glass",
+    47: "cup",
+    48: "fork",
+    49: "knife",
+    50: "spoon",
+    51: "bowl",
+    52: "banana",
+    53: "apple",
+    54: "sandwich",
+    55: "orange",
+    56: "broccoli",
+    57: "carrot",
+    58: "hot_dog",
+    59: "pizza",
+    60: "donut",
+    61: "cake",
+    62: "chair",
+    63: "couch",
+    64: "potted_plant",
+    65: "bed",
+    66: "mirror",
+    67: "dining_table",
+    68: "window",
+    69: "desk",
+    70: "toilet",
+    71: "door",
+    72: "tv",
+    73: "laptop",
+    74: "mouse",
+    75: "remote",
+    76: "keyboard",
+    77: "cell_phone",
+    78: "microwave",
+    79: "oven",
+    80: "toaster",
+    81: "sink",
+    82: "refrigerator",
+    83: "blender",
+    84: "book",
+    85: "clock",
+    86: "vase",
+    87: "scissors",
+    88: "teddy_bear",
+    89: "hair_drier",
+    90: "toothbrush",
+    91: "hair_brush",
+}
+
+COCO_IDS = {v: k for k, v in COCO_NAMES.items()}
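Note: the reverse `COCO_IDS` dict makes the two helpers exact inverses; a quick round-trip check (values taken from the docstring examples above):

    assert coco_id_to_name(2) == "bicycle"
    assert coco_name_to_id("bicycle") == 2
    # Unknown ids/names raise KeyError, since both lookups are plain dict accesses.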
keras_hub/src/utils/imagenet/imagenet_utils.py
@@ -3,6 +3,40 @@ from keras import ops
 from keras_hub.src.api_export import keras_hub_export
 
 
+@keras_hub_export("keras_hub.utils.imagenet_id_to_name")
+def imagenet_id_to_name(id):
+    """Convert a single ImageNet class ID to a class name.
+
+    Args:
+        id: An integer class id from 0 to 999.
+
+    Returns:
+        The human readable image class name, e.g. "goldfish".
+
+    Example:
+    >>> keras_hub.utils.imagenet_id_to_name(1)
+    'goldfish'
+    """
+    return IMAGENET_NAMES[id][1]
+
+
+@keras_hub_export("keras_hub.utils.imagenet_name_to_id")
+def imagenet_name_to_id(name):
+    """Convert a single ImageNet class name to a class ID.
+
+    Args:
+        name: A human readable image class name, e.g. "goldfish".
+
+    Returns:
+        The integer class id from 0 to 999.
+
+    Example:
+    >>> keras_hub.utils.imagenet_name_to_id("goldfish")
+    1
+    """
+    return IMAGENET_IDS[name]
+
+
 @keras_hub_export("keras_hub.utils.decode_imagenet_predictions")
 def decode_imagenet_predictions(preds, top=5, include_synset_ids=False):
     """Decodes the predictions for an ImageNet-1k prediction.
@@ -1052,3 +1086,5 @@ IMAGENET_NAMES = {
     998: ("n13133613", "ear"),
     999: ("n15075141", "toilet_tissue"),
 }
+
+IMAGENET_IDS = {v[1]: k for k, v in IMAGENET_NAMES.items()}
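Note: `IMAGENET_NAMES` maps each ID to a `(synset_id, name)` tuple, so `imagenet_id_to_name` returns the tuple's second element, and `IMAGENET_IDS` inverts on the name alone (values from the docstring examples above):

    assert imagenet_id_to_name(1) == "goldfish"  # IMAGENET_NAMES[1][1]
    assert imagenet_name_to_id("goldfish") == 1  # reverse lookup via IMAGENET_IDS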
keras_hub/src/utils/preset_utils.py
@@ -801,7 +801,7 @@ class KerasPresetSaver:
 
     def _save_metadata(self, layer):
         from keras_hub.src.models.task import Task
-        from keras_hub.src.version_utils import __version__ as keras_hub_version
+        from keras_hub.src.version import __version__ as keras_hub_version
 
         # Find all tasks that are compatible with the backbone.
         # E.g. for `BertBackbone` we would have `TextClassifier` and `MaskedLM`.
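Note: this one-line change tracks the `version_utils.py → version.py` rename from the file list above (the removed import path is reconstructed from that rename). Reading the version through the public package should be unaffected, assuming the new top-level `keras_hub/__init__.py` still re-exports `__version__` as the deleted `keras_hub/api/__init__.py` did:

    import keras_hub

    print(keras_hub.__version__)  # e.g. "0.21.0.dev202504250023"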
keras_hub/tokenizers/__init__.py ADDED
@@ -0,0 +1,108 @@
+"""DO NOT EDIT.
+
+This file was autogenerated. Do not edit it by hand,
+since your modifications would be overwritten.
+"""
+
+from keras_hub.src.models.albert.albert_tokenizer import (
+    AlbertTokenizer as AlbertTokenizer,
+)
+from keras_hub.src.models.bart.bart_tokenizer import (
+    BartTokenizer as BartTokenizer,
+)
+from keras_hub.src.models.bert.bert_tokenizer import (
+    BertTokenizer as BertTokenizer,
+)
+from keras_hub.src.models.bloom.bloom_tokenizer import (
+    BloomTokenizer as BloomTokenizer,
+)
+from keras_hub.src.models.clip.clip_tokenizer import (
+    CLIPTokenizer as CLIPTokenizer,
+)
+from keras_hub.src.models.deberta_v3.deberta_v3_tokenizer import (
+    DebertaV3Tokenizer as DebertaV3Tokenizer,
+)
+from keras_hub.src.models.distil_bert.distil_bert_tokenizer import (
+    DistilBertTokenizer as DistilBertTokenizer,
+)
+from keras_hub.src.models.electra.electra_tokenizer import (
+    ElectraTokenizer as ElectraTokenizer,
+)
+from keras_hub.src.models.f_net.f_net_tokenizer import (
+    FNetTokenizer as FNetTokenizer,
+)
+from keras_hub.src.models.falcon.falcon_tokenizer import (
+    FalconTokenizer as FalconTokenizer,
+)
+from keras_hub.src.models.gemma.gemma_tokenizer import (
+    GemmaTokenizer as GemmaTokenizer,
+)
+from keras_hub.src.models.gemma3.gemma3_tokenizer import (
+    Gemma3Tokenizer as Gemma3Tokenizer,
+)
+from keras_hub.src.models.gpt2.gpt2_tokenizer import (
+    GPT2Tokenizer as GPT2Tokenizer,
+)
+from keras_hub.src.models.gpt_neo_x.gpt_neo_x_tokenizer import (
+    GPTNeoXTokenizer as GPTNeoXTokenizer,
+)
+from keras_hub.src.models.llama.llama_tokenizer import (
+    LlamaTokenizer as LlamaTokenizer,
+)
+from keras_hub.src.models.llama3.llama3_tokenizer import (
+    Llama3Tokenizer as Llama3Tokenizer,
+)
+from keras_hub.src.models.mistral.mistral_tokenizer import (
+    MistralTokenizer as MistralTokenizer,
+)
+from keras_hub.src.models.opt.opt_tokenizer import OPTTokenizer as OPTTokenizer
+from keras_hub.src.models.pali_gemma.pali_gemma_tokenizer import (
+    PaliGemmaTokenizer as PaliGemmaTokenizer,
+)
+from keras_hub.src.models.phi3.phi3_tokenizer import (
+    Phi3Tokenizer as Phi3Tokenizer,
+)
+from keras_hub.src.models.qwen.qwen_tokenizer import (
+    QwenTokenizer as Qwen2Tokenizer,
+)
+from keras_hub.src.models.qwen.qwen_tokenizer import (
+    QwenTokenizer as QwenTokenizer,
+)
+from keras_hub.src.models.roberta.roberta_tokenizer import (
+    RobertaTokenizer as RobertaTokenizer,
+)
+from keras_hub.src.models.roformer_v2.roformer_v2_tokenizer import (
+    RoformerV2Tokenizer as RoformerV2Tokenizer,
+)
+from keras_hub.src.models.siglip.siglip_tokenizer import (
+    SigLIPTokenizer as SigLIPTokenizer,
+)
+from keras_hub.src.models.t5.t5_tokenizer import T5Tokenizer as T5Tokenizer
+from keras_hub.src.models.whisper.whisper_tokenizer import (
+    WhisperTokenizer as WhisperTokenizer,
+)
+from keras_hub.src.models.xlm_roberta.xlm_roberta_tokenizer import (
+    XLMRobertaTokenizer as XLMRobertaTokenizer,
+)
+from keras_hub.src.tokenizers.byte_pair_tokenizer import (
+    BytePairTokenizer as BytePairTokenizer,
+)
+from keras_hub.src.tokenizers.byte_tokenizer import (
+    ByteTokenizer as ByteTokenizer,
+)
+from keras_hub.src.tokenizers.sentence_piece_tokenizer import (
+    SentencePieceTokenizer as SentencePieceTokenizer,
+)
+from keras_hub.src.tokenizers.sentence_piece_tokenizer_trainer import (
+    compute_sentence_piece_proto as compute_sentence_piece_proto,
+)
+from keras_hub.src.tokenizers.tokenizer import Tokenizer as Tokenizer
+from keras_hub.src.tokenizers.unicode_codepoint_tokenizer import (
+    UnicodeCodepointTokenizer as UnicodeCodepointTokenizer,
+)
+from keras_hub.src.tokenizers.word_piece_tokenizer import (
+    WordPieceTokenizer as WordPieceTokenizer,
+)
+from keras_hub.src.tokenizers.word_piece_tokenizer_trainer import (
+    compute_word_piece_vocabulary as compute_word_piece_vocabulary,
+)
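Note: the `Name as Name` spelling marks each import as an explicit re-export (the convention type checkers use for package `__init__` modules), so the flat namespace survives strict type checking. Usage is then direct, e.g.:

    from keras_hub.tokenizers import BertTokenizer, SentencePieceTokenizer

Also note `QwenTokenizer` is re-exported twice, once under the `Qwen2Tokenizer` alias.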
keras_hub/utils/__init__.py ADDED
@@ -0,0 +1,21 @@
+"""DO NOT EDIT.
+
+This file was autogenerated. Do not edit it by hand,
+since your modifications would be overwritten.
+"""
+
+from keras_hub.src.utils.coco.coco_utils import (
+    coco_id_to_name as coco_id_to_name,
+)
+from keras_hub.src.utils.coco.coco_utils import (
+    coco_name_to_id as coco_name_to_id,
+)
+from keras_hub.src.utils.imagenet.imagenet_utils import (
+    decode_imagenet_predictions as decode_imagenet_predictions,
+)
+from keras_hub.src.utils.imagenet.imagenet_utils import (
+    imagenet_id_to_name as imagenet_id_to_name,
+)
+from keras_hub.src.utils.imagenet.imagenet_utils import (
+    imagenet_name_to_id as imagenet_name_to_id,
+)
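Note: these re-exports match the `keras_hub_export("keras_hub.utils.…")` decorators in the source files above, so the helpers are reachable from the installed package root:

    import keras_hub

    keras_hub.utils.coco_id_to_name(2)               # 'bicycle'
    keras_hub.utils.imagenet_name_to_id("goldfish")  # 1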
{keras_hub_nightly-0.21.0.dev202504210405.dist-info → keras_hub_nightly-0.21.0.dev202504250023.dist-info}/METADATA
@@ -1,11 +1,11 @@
 Metadata-Version: 2.4
 Name: keras-hub-nightly
-Version: 0.21.0.dev202504210405
-Summary: …
-…
-…
-…
-…
+Version: 0.21.0.dev202504250023
+Summary: Pretrained models for Keras.
+Author-email: Keras team <keras-users@googlegroups.com>
+License-Expression: Apache-2.0
+Project-URL: Home, https://keras.io/keras_hub/
+Project-URL: Repository, https://github.com/keras-team/keras/keras_hub
 Classifier: Development Status :: 3 - Alpha
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.9
@@ -28,20 +28,6 @@ Requires-Dist: regex
 Requires-Dist: rich
 Requires-Dist: kagglehub
 Requires-Dist: tensorflow-text; platform_system != "Windows"
-Provides-Extra: extras
-Requires-Dist: rouge-score; extra == "extras"
-Requires-Dist: sentencepiece; extra == "extras"
-Dynamic: author
-Dynamic: author-email
-Dynamic: classifier
-Dynamic: description
-Dynamic: description-content-type
-Dynamic: home-page
-Dynamic: license
-Dynamic: provides-extra
-Dynamic: requires-dist
-Dynamic: requires-python
-Dynamic: summary
 
 # KerasHub: Multi-framework Pretrained Models
 [](https://github.com/keras-team/keras-hub/actions?query=workflow%3ATests+branch%3Amaster)
{keras_hub_nightly-0.21.0.dev202504210405.dist-info → keras_hub_nightly-0.21.0.dev202504250023.dist-info}/RECORD
@@ -1,14 +1,11 @@
-keras_hub/__init__.py,sha256=…
-keras_hub/api/__init__.py,sha256=…
-keras_hub/api/layers/__init__.py,sha256=…
-keras_hub/api/metrics/__init__.py,sha256=…
-keras_hub/api/models/__init__.py,sha256=…
-keras_hub/api/samplers/__init__.py,sha256=n-_SEXxr2LNUzK2FqVFN7alsrkx1P_HOVTeLZKeGCdE,730
-keras_hub/api/tokenizers/__init__.py,sha256=NCQSOg3vf3KlM2YBsxApcJUVu9MH2jV0NQrM3f4EhJ4,2927
-keras_hub/api/utils/__init__.py,sha256=Gp1E6gG-RtKQS3PBEQEOz9PQvXkXaJ0ySGMqZ7myN7A,215
+keras_hub/__init__.py,sha256=bJbUZkqwhZvTb1Tqx1fbkq6mzBYiEyq-Hin3oQIkhdE,558
+keras_hub/layers/__init__.py,sha256=LhMUEcl3xJwqr0XphTgRZ5Ayz5SsBAKV19c0XwSzj1I,4952
+keras_hub/metrics/__init__.py,sha256=KYalsMPBnfwim9BdGHFfJ5WxUKFXOQ1QoKIMT_0lwlM,439
+keras_hub/models/__init__.py,sha256=Np-V3YdZcIa0xwqhjmgm5NWnsCj647aClYW0Uhi3eSI,25108
+keras_hub/samplers/__init__.py,sha256=aFQIkiqbZpi8vjrPp2MVII4QUfE-eQjra5fMeHsoy7k,886
 keras_hub/src/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_hub/src/api_export.py,sha256=9pQZK27JObxWZ96QPLBp1OBsjWigh1iuV6RglPGMRk0,1499
-keras_hub/src/version_utils.py,sha256=…
+keras_hub/src/version.py,sha256=dRsXOj26LXBv341d8FhxFfwIn0o0wJ6K2qWxXazTlck,222
 keras_hub/src/layers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_hub/src/layers/modeling/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_hub/src/layers/modeling/alibi_bias.py,sha256=1XBTHI52L_iJDhN_w5ydu_iMhCuTgQAxEPwcLA6BPuk,4411
@@ -427,6 +424,7 @@ keras_hub/src/samplers/top_k_sampler.py,sha256=WSyrhmOCan55X2JYAnNWE88rkx66sXqdo
 keras_hub/src/samplers/top_p_sampler.py,sha256=9r29WdqBlrW_2TBma6QqkRps2Uit4a6iZPmq1Gsiuko,3400
 keras_hub/src/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_hub/src/tests/test_case.py,sha256=lBIH6rNJU7wasOV-Iq4mymPg28kznqMi81LOEWWvUYY,27476
+keras_hub/src/tests/mocks/mock_gemma3_tokenizer.py,sha256=a4mSer84-xh9dVJUVpFUPzglCh-7NcFqHRKPDR35c8c,4888
 keras_hub/src/tokenizers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_hub/src/tokenizers/byte_pair_tokenizer.py,sha256=WeUlHMAf5y_MUjFIfVhEcFoOZu-z4kkSj-Dq-pegM9w,24052
 keras_hub/src/tokenizers/byte_tokenizer.py,sha256=GPIKaddXugbfckfhodADsBpaYb72DgFMs_xfXHnK4qU,10418
@@ -439,11 +437,13 @@ keras_hub/src/tokenizers/word_piece_tokenizer_trainer.py,sha256=cylrs02ZrYQ1TuZr
 keras_hub/src/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_hub/src/utils/keras_utils.py,sha256=mtj5Kr9EROso10SafmQ-C9uCLbIId4cXAuJSNDRqHb8,4290
 keras_hub/src/utils/pipeline_model.py,sha256=jgzB6NQPSl0KOu08N-TazfOnXnUJbZjH2EXXhx25Ftg,9084
-keras_hub/src/utils/preset_utils.py,sha256=…
+keras_hub/src/utils/preset_utils.py,sha256=Zhc2xIHUagBajmdvuz-91gRnKqJA0CGPr_yIHI_UXEY,32006
 keras_hub/src/utils/python_utils.py,sha256=N8nWeO3san4YnGkffRXG3Ix7VEIMTKSN21FX5TuL7G8,202
 keras_hub/src/utils/tensor_utils.py,sha256=1Y9E2psWzN5hJqWz_r2kOoFU7kIp7mPBTX5xZbTsE_A,15144
+keras_hub/src/utils/coco/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+keras_hub/src/utils/coco/coco_utils.py,sha256=x_QnUUvZ92zoFzMJugiInHORc4NrMdWVBkpp8BAYF6s,2586
 keras_hub/src/utils/imagenet/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-keras_hub/src/utils/imagenet/imagenet_utils.py,sha256=…
+keras_hub/src/utils/imagenet/imagenet_utils.py,sha256=07ilM5feeD7Ut6YSbVj99RXAZOQONSC1IeKa3I9U6UQ,40161
 keras_hub/src/utils/timm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_hub/src/utils/timm/convert_cspnet.py,sha256=O5HCdeKcSFWOoFr8_wIUQb4Noc0tBEo5Aogk2d6SEes,5676
 keras_hub/src/utils/timm/convert_densenet.py,sha256=fu8HBIQis5o3ib2tyI2qnmYScVrVIQySok8vTfa1qJ8,3393
@@ -466,7 +466,9 @@ keras_hub/src/utils/transformers/convert_qwen.py,sha256=WUxMAEFVqRs7TRw7QU5TH3_e
 keras_hub/src/utils/transformers/convert_vit.py,sha256=9SUZ9utNJhW_5cj3acMn9cRy47u2eIcDsrhmzj77o9k,5187
 keras_hub/src/utils/transformers/preset_loader.py,sha256=0Hi7R8HnATcwFVLsJwMMIMWTCXHNfep4IPiRpQXqM-w,3933
 keras_hub/src/utils/transformers/safetensor_utils.py,sha256=CYUHyA4y-B61r7NDnCsFb4t_UmSwZ1k9L-8gzEd6KRg,3339
-…
-…
-keras_hub_nightly-0.21.0.dev202504210405.dist-info/…
-keras_hub_nightly-0.21.0.dev202504210405.dist-info/…
+keras_hub/tokenizers/__init__.py,sha256=1X8s88lzi6zM9XaMHbpACa4kpwjDILgmUV0tl1jXeeo,3839
+keras_hub/utils/__init__.py,sha256=jXPqVGBpJr_PpYmqD8aDG-fRMlxH-ulqCR2SZMn288Y,646
+keras_hub_nightly-0.21.0.dev202504250023.dist-info/METADATA,sha256=fnygXbMKQYMF4db1PFypMcXgTA5s26eY38i7E6CL6tI,7393
+keras_hub_nightly-0.21.0.dev202504250023.dist-info/WHEEL,sha256=SmOxYU7pzNKBqASvQJ7DjX3XGUF92lrGhMb3R6_iiqI,91
+keras_hub_nightly-0.21.0.dev202504250023.dist-info/top_level.txt,sha256=N4J6piIWBKa38A4uV-CnIopnOEf8mHAbkNXafXm_CuA,10
+keras_hub_nightly-0.21.0.dev202504250023.dist-info/RECORD,,
keras_hub/api/__init__.py DELETED
@@ -1,15 +0,0 @@
-"""DO NOT EDIT.
-
-This file was autogenerated. Do not edit it by hand,
-since your modifications would be overwritten.
-"""
-
-from keras_hub.api import layers
-from keras_hub.api import metrics
-from keras_hub.api import models
-from keras_hub.api import samplers
-from keras_hub.api import tokenizers
-from keras_hub.api import utils
-from keras_hub.src.utils.preset_utils import upload_preset
-from keras_hub.src.version_utils import __version__
-from keras_hub.src.version_utils import version