keras-hub-nightly 0.21.0.dev202505220409__py3-none-any.whl → 0.21.0.dev202505240409__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
keras_hub/src/models/audio_to_text.py ADDED
@@ -0,0 +1,66 @@
+ from keras_hub.src.models.seq_2_seq_lm import Seq2SeqLM
+
+
+ class AudioToText(Seq2SeqLM):
+ """Base class for audio-to-text models.
+
+ `AudioToText` tasks wrap a `keras_hub.models.Backbone` (capable of
+ processing audio and text features) and a
+ `keras_hub.models.AudioToTextPreprocessor` to create a model for
+ audio-to-text tasks like speech recognition or audio transcription.
+
+ These models typically consist of an encoder that processes audio input
+ and a decoder that generates a textual representation.
+
+ `AudioToText` tasks provide a high-level `generate()` method for
+ auto-regressively generating text from audio input. An optional text
+ prompt can also be provided to the decoder to guide generation. The
+ sampling strategy for generation (e.g., greedy, top-k, top-p) can be
+ controlled via the `sampler` argument in the `compile()` method.
+
+ When calling `fit()`, inputs should consist of audio data and corresponding
+ target text transcriptions. The model is trained to predict the target text
+ token by token.
+
+ All `AudioToText` tasks include a `from_preset()` constructor which
+ can be used to load pre-trained configurations and weights for specific
+ audio-to-text models.
+ This constructor can also be called on the base `AudioToText` class,
+ which will automatically select the correct subclass based on the preset.
+
+ Examples:
+ ```python
+ # `AudioToText` is a base class. You will typically work with a specific
+ # implementation, such as `keras_hub.models.MoonshineAudioToText`.
+ # The following examples demonstrate common usage patterns.
+
+ # Initialize a model from a preset using the specific subclass.
+ audio_to_text = keras_hub.models.MoonshineAudioToText.from_preset(
+ "moonshine_base_en"
+ )
+
+ # Initialize the same model from a preset using the base class.
+ audio_to_text = keras_hub.models.AudioToText.from_preset(
+ "moonshine_base_en"
+ )
+
+ # Generate text from an audio input.
+ audio_input_tensor = keras.random.normal((1, 16000, 1))
+ generated_output = audio_to_text.generate(
+ {"audio": audio_input_tensor}
+ )
+
+ # Generate conditioned on `"The quick brown fox."` as a text prompt.
+ prompted_output = audio_to_text.generate(
+ {"audio": audio_input_tensor, "text": "The quick brown fox."}
+ )
+
+ # Use a different sampling strategy for generation.
+ audio_to_text.compile(sampler="greedy")
+ greedy_output = audio_to_text.generate(
+ {"audio": audio_input_tensor}
+ )
+ ```
+ """
+
+ # TODO: Fill in once audio-to-text task model requirements are clearer.
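Note: `AudioToText` defers its concrete wiring to subclasses (see the TODO above). Below is a minimal sketch of how a subclass is expected to pair a backbone with a preprocessor, assuming the usual keras-hub convention of declaring the paired classes as class attributes; `DemoAudioToText` is an illustrative name, while the wheel's real subclass is `MoonshineAudioToText` (see its hunk further down).

```python
# A minimal sketch, assuming the keras-hub task convention that
# `from_preset()` uses these class attributes to build and wire the
# sub-objects. `DemoAudioToText` is a hypothetical name for illustration.
from keras_hub.src.models.audio_to_text import AudioToText
from keras_hub.src.models.moonshine.moonshine_audio_to_text_preprocessor import (
    MoonshineAudioToTextPreprocessor,
)
from keras_hub.src.models.moonshine.moonshine_backbone import MoonshineBackbone


class DemoAudioToText(AudioToText):
    backbone_cls = MoonshineBackbone
    preprocessor_cls = MoonshineAudioToTextPreprocessor
```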
keras_hub/src/models/audio_to_text_preprocessor.py ADDED
@@ -0,0 +1,80 @@
+ from keras_hub.src.models.seq_2_seq_lm_preprocessor import Seq2SeqLMPreprocessor
+
+
+ class AudioToTextPreprocessor(Seq2SeqLMPreprocessor):
+ """Base class for audio-to-text preprocessing layers.
+
+ `AudioToTextPreprocessor` layers wrap an audio feature extractor (specific
+ to the subclass) and a `keras_hub.tokenizer.Tokenizer` to create a
+ preprocessing layer for audio-to-text tasks. It is intended to be
+ paired with a `keras_hub.models.AudioToText` task.
+
+ Subclasses are expected to handle the conversion of raw audio data into
+ numerical features suitable for an encoder, and raw text data into token IDs
+ for a decoder.
+
+ All `AudioToTextPreprocessor` layers take a dictionary as input,
+ typically with keys like `"audio"` (for audio data) and `"text"` (for
+ target transcriptions or decoder prompts).
+
+ This layer will always output an `(x, y, sample_weight)` tuple, where `x`
+ is a dictionary containing processed audio features for the encoder and
+ tokenized text inputs for the decoder, `y` contains the target token IDs
+ (decoder input tokens shifted by one position), and `sample_weight`
+ marks padding positions in `y`. The exact keys and structure of features
+ within `x` depend on the specific subclass and the paired `AudioToText`
+ model.
+
+ An `AudioToTextPreprocessor` includes `generate_preprocess` and
+ `generate_postprocess` methods for use during inference with an
+ `AudioToText` model's `generate()` method.
+
+ All `AudioToTextPreprocessor` layers include a `from_preset()` constructor
+ which can be used to load a pre-trained configuration, including tokenizer
+ vocabularies and audio feature extraction settings. Calling `from_preset()`
+ on this base class will instantiate the correct subclass registered for the
+ given preset.
+
+ Examples:
+ ```python
+ preprocessor = keras_hub.models.AudioToTextPreprocessor.from_preset(
+ "moonshine_base_en",
+ decoder_sequence_length=10
+ )
+
+ # Process a single audio-text pair.
+ x = {
+ "audio": keras.random.normal((1, 16000, 1)),
+ "text": ["the quick brown fox"]
+ }
+ x, y, sample_weight = preprocessor(x)
+
+ # Process a batch of audio-text pairs.
+ batch_input = {
+ "audio": keras.random.normal((2, 16000, 1)),
+ "text": ["first sentence", "second sentence"]
+ }
+ x, y, sample_weight = preprocessor(batch_input)
+
+ # With a `tf.data.Dataset`.
+ ds = tf.data.Dataset.from_tensor_slices(batch_input)
+ ds = ds.map(preprocessor, num_parallel_calls=tf.data.AUTOTUNE)
+ ds = ds.batch(2)  # Batch after mapping.
+
+ # Generate preprocess and postprocess.
+ x = preprocessor.generate_preprocess({
+ "audio": keras.random.normal((1, 16000, 1)),
+ "text": ["optional prompt text"]
+ })
+ text = preprocessor.generate_postprocess({
+ "decoder_token_ids": keras.ops.array([[10, 20, 30, 2, 0]]),
+ "decoder_padding_mask": keras.ops.array([
+ [True, True, True, True, False]
+ ])
+ })
+ ```
+ """
+
+ # TODO: Fill in once audio-to-text task model requirements are clearer.
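The shift-by-one relationship between decoder inputs and targets described in the docstring can be seen in a toy example (plain Python, no preset required):

```python
# Toy illustration of the `(x, y, sample_weight)` contract: targets are the
# decoder inputs shifted one position left, with padding masked out of the
# loss via `sample_weight`.
decoder_token_ids = [1, 10, 20, 30, 2, 0]  # [start, t1, t2, t3, end, pad]
x_decoder_input = decoder_token_ids[:-1]   # what the decoder sees
y = decoder_token_ids[1:]                  # what it must predict
sample_weight = [1, 1, 1, 1, 0]            # zero out the padding position
```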
keras_hub/src/models/gemma/gemma_presets.py CHANGED
@@ -61,7 +61,7 @@ backbone_presets = {
  "params": 8537680896,
  "path": "gemma",
  },
- "kaggle_handle": "kaggle://keras/gemma/keras/gemma_7b_en/3",
+ "kaggle_handle": "kaggle://keras/gemma/keras/gemma_7b_en/4",
  },
  "gemma_instruct_7b_en": {
  "metadata": {
@@ -71,7 +71,7 @@ backbone_presets = {
  "params": 8537680896,
  "path": "gemma",
  },
- "kaggle_handle": "kaggle://keras/gemma/keras/gemma_instruct_7b_en/3",
+ "kaggle_handle": "kaggle://keras/gemma/keras/gemma_instruct_7b_en/4",
  },
  "gemma_1.1_instruct_7b_en": {
  "metadata": {
@@ -82,7 +82,7 @@ backbone_presets = {
  "params": 8537680896,
  "path": "gemma",
  },
- "kaggle_handle": "kaggle://keras/gemma/keras/gemma_1.1_instruct_7b_en/4",
+ "kaggle_handle": "kaggle://keras/gemma/keras/gemma_1.1_instruct_7b_en/5",
  },
  "code_gemma_7b_en": {
  "metadata": {
@@ -94,7 +94,7 @@ backbone_presets = {
  "params": 8537680896,
  "path": "gemma",
  },
- "kaggle_handle": "kaggle://keras/codegemma/keras/code_gemma_7b_en/2",
+ "kaggle_handle": "kaggle://keras/codegemma/keras/code_gemma_7b_en/3",
  },
  "code_gemma_instruct_7b_en": {
  "metadata": {
@@ -106,7 +106,7 @@ backbone_presets = {
  "params": 8537680896,
  "path": "gemma",
  },
- "kaggle_handle": "kaggle://keras/codegemma/keras/code_gemma_instruct_7b_en/2",
+ "kaggle_handle": "kaggle://keras/codegemma/keras/code_gemma_instruct_7b_en/3",
  },
  "code_gemma_1.1_instruct_7b_en": {
  "metadata": {
@@ -118,7 +118,7 @@ backbone_presets = {
  "params": 8537680896,
  "path": "gemma",
  },
- "kaggle_handle": "kaggle://keras/codegemma/keras/code_gemma_1.1_instruct_7b_en/2",
+ "kaggle_handle": "kaggle://keras/codegemma/keras/code_gemma_1.1_instruct_7b_en/3",
  },
  "gemma2_2b_en": {
  "metadata": {
@@ -144,7 +144,7 @@ backbone_presets = {
  "params": 9241705984,
  "path": "gemma",
  },
- "kaggle_handle": "kaggle://keras/gemma2/keras/gemma2_9b_en/3",
+ "kaggle_handle": "kaggle://keras/gemma2/keras/gemma2_9b_en/4",
  },
  "gemma2_instruct_9b_en": {
  "metadata": {
@@ -154,7 +154,7 @@ backbone_presets = {
  "params": 9241705984,
  "path": "gemma",
  },
- "kaggle_handle": "kaggle://keras/gemma2/keras/gemma2_instruct_9b_en/3",
+ "kaggle_handle": "kaggle://keras/gemma2/keras/gemma2_instruct_9b_en/4",
  },
  "gemma2_27b_en": {
  "metadata": {
@@ -162,7 +162,7 @@ backbone_presets = {
  "params": 27227128320,
  "path": "gemma",
  },
- "kaggle_handle": "kaggle://keras/gemma2/keras/gemma2_27b_en/2",
+ "kaggle_handle": "kaggle://keras/gemma2/keras/gemma2_27b_en/3",
  },
  "gemma2_instruct_27b_en": {
  "metadata": {
@@ -172,7 +172,7 @@ backbone_presets = {
  "params": 27227128320,
  "path": "gemma",
  },
- "kaggle_handle": "kaggle://keras/gemma2/keras/gemma2_instruct_27b_en/2",
+ "kaggle_handle": "kaggle://keras/gemma2/keras/gemma2_instruct_27b_en/3",
  },
  "shieldgemma_2b_en": {
  "metadata": {
keras_hub/src/models/gemma3/gemma3_presets.py CHANGED
@@ -55,7 +55,7 @@ backbone_presets = {
  "params": 11765788416,
  "path": "gemma3",
  },
- "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_12b_text/2",
+ "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_12b_text/3",
  },
  "gemma3_instruct_12b_text": {
  "metadata": {
@@ -66,7 +66,7 @@ backbone_presets = {
  "params": 11765788416,
  "path": "gemma3",
  },
- "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_instruct_12b_text/2",
+ "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_instruct_12b_text/3",
  },
  "gemma3_27b_text": {
  "metadata": {
@@ -77,7 +77,7 @@ backbone_presets = {
  "params": 27009002240,
  "path": "gemma3",
  },
- "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_27b_text/3",
+ "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_27b_text/4",
  },
  "gemma3_instruct_27b_text": {
  "metadata": {
@@ -88,7 +88,7 @@ backbone_presets = {
  "params": 27009002240,
  "path": "gemma3",
  },
- "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_instruct_27b_text/2",
+ "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_instruct_27b_text/3",
  },
  "gemma3_4b": {
  "metadata": {
@@ -121,7 +121,7 @@ backbone_presets = {
  "params": 12187079280,
  "path": "gemma3",
  },
- "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_12b/1",
+ "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_12b/2",
  },
  "gemma3_instruct_12b": {
  "metadata": {
@@ -132,7 +132,7 @@ backbone_presets = {
  "params": 12187079280,
  "path": "gemma3",
  },
- "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_instruct_12b/1",
+ "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_instruct_12b/2",
  },
  "gemma3_27b": {
  "metadata": {
@@ -143,7 +143,7 @@ backbone_presets = {
  "params": 27432062576,
  "path": "gemma3",
  },
- "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_27b/1",
+ "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_27b/2",
  },
  "gemma3_instruct_27b": {
  "metadata": {
@@ -154,6 +154,6 @@ backbone_presets = {
  "params": 27432062576,
  "path": "gemma3",
  },
- "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_instruct_27b/1",
+ "kaggle_handle": "kaggle://keras/gemma3/keras/gemma3_instruct_27b/2",
  },
  }
keras_hub/src/models/llama/llama_presets.py CHANGED
@@ -8,7 +8,7 @@ backbone_presets = {
  "params": 6738415616,
  "path": "llama",
  },
- "kaggle_handle": "kaggle://keras/llama2/keras/llama2_7b_en/2",
+ "kaggle_handle": "kaggle://keras/llama2/keras/llama2_7b_en/3",
  },
  "llama2_7b_en_int8": {
  "metadata": {
@@ -30,7 +30,7 @@ backbone_presets = {
  "params": 6738415616,
  "path": "llama",
  },
- "kaggle_handle": "kaggle://keras/llama2/keras/llama2_instruct_7b_en/2",
+ "kaggle_handle": "kaggle://keras/llama2/keras/llama2_instruct_7b_en/3",
  },
  "llama2_instruct_7b_en_int8": {
  "metadata": {
@@ -52,6 +52,6 @@ backbone_presets = {
  "params": 6738415616,
  "path": "llama",
  },
- "kaggle_handle": "kaggle://keras/vicuna/keras/vicuna_1.5_7b_en/2",
+ "kaggle_handle": "kaggle://keras/vicuna/keras/vicuna_1.5_7b_en/3",
  },
  }
keras_hub/src/models/llama3/llama3_presets.py CHANGED
@@ -8,7 +8,7 @@ backbone_presets = {
  "params": 8030261248,
  "path": "llama3",
  },
- "kaggle_handle": "kaggle://keras/llama3/keras/llama3_8b_en/4",
+ "kaggle_handle": "kaggle://keras/llama3/keras/llama3_8b_en/5",
  },
  "llama3_8b_en_int8": {
  "metadata": {
@@ -30,7 +30,7 @@ backbone_presets = {
  "params": 8030261248,
  "path": "llama3",
  },
- "kaggle_handle": "kaggle://keras/llama3/keras/llama3_instruct_8b_en/4",
+ "kaggle_handle": "kaggle://keras/llama3/keras/llama3_instruct_8b_en/5",
  },
  "llama3_instruct_8b_en_int8": {
  "metadata": {
keras_hub/src/models/mistral/mistral_presets.py CHANGED
@@ -8,7 +8,7 @@ backbone_presets = {
  "params": 7241732096,
  "path": "mistral",
  },
- "kaggle_handle": "kaggle://keras/mistral/keras/mistral_7b_en/7",
+ "kaggle_handle": "kaggle://keras/mistral/keras/mistral_7b_en/8",
  },
  "mistral_instruct_7b_en": {
  "metadata": {
@@ -16,7 +16,7 @@ backbone_presets = {
  "params": 7241732096,
  "path": "mistral",
  },
- "kaggle_handle": "kaggle://keras/mistral/keras/mistral_instruct_7b_en/7",
+ "kaggle_handle": "kaggle://keras/mistral/keras/mistral_instruct_7b_en/8",
  },
  "mistral_0.2_instruct_7b_en": {
  "metadata": {
@@ -24,6 +24,6 @@ backbone_presets = {
  "params": 7241732096,
  "path": "mistral",
  },
- "kaggle_handle": "kaggle://keras/mistral/keras/mistral_0.2_instruct_7b_en/2",
+ "kaggle_handle": "kaggle://keras/mistral/keras/mistral_0.2_instruct_7b_en/3",
  },
  }
keras_hub/src/models/mixtral/mixtral_presets.py CHANGED
@@ -10,7 +10,7 @@ backbone_presets = {
  "params": 46702792704,
  "path": "mixtral",
  },
- "kaggle_handle": "kaggle://keras/mixtral/keras/mixtral_8_7b_en",
+ "kaggle_handle": "kaggle://keras/mixtral/keras/mixtral_8_7b_en/3",
  },
  "mixtral_8_instruct_7b_en": {
  "metadata": {
@@ -21,6 +21,6 @@ backbone_presets = {
  "params": 46702792704,
  "path": "mixtral",
  },
- "kaggle_handle": "kaggle://keras/mixtral/keras/mixtral_8_instruct_7b_en",
+ "kaggle_handle": "kaggle://keras/mixtral/keras/mixtral_8_instruct_7b_en/3",
  },
  }
keras_hub/src/models/moonshine/__init__.py CHANGED
@@ -0,0 +1,5 @@
+ from keras_hub.src.models.moonshine.moonshine_backbone import MoonshineBackbone
+ from keras_hub.src.models.moonshine.moonshine_presets import backbone_presets
+ from keras_hub.src.utils.preset_utils import register_presets
+
+ register_presets(backbone_presets, MoonshineBackbone)
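This registration is what makes the Moonshine checkpoints resolvable by name. A minimal sketch, assuming network access and that `MoonshineBackbone` is exported under `keras_hub.models` like other backbones:

```python
import keras_hub

# After `register_presets(...)` runs at import time, the string name below
# resolves to the pinned Kaggle handle listed in `moonshine_presets.py`.
backbone = keras_hub.models.MoonshineBackbone.from_preset("moonshine_base_en")
```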
keras_hub/src/models/moonshine/moonshine_audio_to_text.py CHANGED
@@ -1,6 +1,7 @@
  import keras

  from keras_hub.src.api_export import keras_hub_export
+ from keras_hub.src.models.audio_to_text import AudioToText
  from keras_hub.src.models.moonshine.moonshine_audio_to_text_preprocessor import ( # noqa: E501
  MoonshineAudioToTextPreprocessor,
  )
@@ -9,12 +10,11 @@ from keras_hub.src.models.moonshine.moonshine_backbone import MoonshineBackbone
  from keras_hub.src.models.moonshine.moonshine_backbone import (
  compute_output_lengths,
  )
- from keras_hub.src.models.seq_2_seq_lm import Seq2SeqLM
  from keras_hub.src.utils.tensor_utils import any_equal


  @keras_hub_export("keras_hub.models.MoonshineAudioToText")
- class MoonshineAudioToText(Seq2SeqLM):
+ class MoonshineAudioToText(AudioToText):
  """An end-to-end Moonshine model for audio-to-text tasks.

  A Seq2Seq LM designed for audio-to-text tasks, such as speech recognition.
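This rebasing is behavior-preserving: `AudioToText` itself subclasses `Seq2SeqLM`, so the Moonshine task keeps its old interface while gaining the new base class. A quick check of the hierarchy, using only imports shown in this diff:

```python
from keras_hub.src.models.audio_to_text import AudioToText
from keras_hub.src.models.moonshine.moonshine_audio_to_text import (
    MoonshineAudioToText,
)
from keras_hub.src.models.seq_2_seq_lm import Seq2SeqLM

# The new base class slots between the task and Seq2SeqLM, so code written
# against Seq2SeqLM keeps working unchanged.
assert issubclass(MoonshineAudioToText, AudioToText)
assert issubclass(MoonshineAudioToText, Seq2SeqLM)
```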
keras_hub/src/models/moonshine/moonshine_audio_to_text_preprocessor.py CHANGED
@@ -6,16 +6,18 @@ except ImportError:
  tf = None
  from keras_hub.src.api_export import keras_hub_export
  from keras_hub.src.layers.preprocessing.start_end_packer import StartEndPacker
+ from keras_hub.src.models.audio_to_text_preprocessor import (
+ AudioToTextPreprocessor,
+ )
  from keras_hub.src.models.moonshine.moonshine_backbone import MoonshineBackbone
  from keras_hub.src.models.moonshine.moonshine_tokenizer import (
  MoonshineTokenizer,
  )
- from keras_hub.src.models.seq_2_seq_lm_preprocessor import Seq2SeqLMPreprocessor
  from keras_hub.src.utils.tensor_utils import preprocessing_function


  @keras_hub_export("keras_hub.models.MoonshineAudioToTextPreprocessor")
- class MoonshineAudioToTextPreprocessor(Seq2SeqLMPreprocessor):
+ class MoonshineAudioToTextPreprocessor(AudioToTextPreprocessor):
  """Moonshine Seq2Seq LM preprocessor for audio-to-text tasks.

  This preprocessor converts raw audio and text inputs into a format suitable
keras_hub/src/models/moonshine/moonshine_presets.py CHANGED
@@ -9,7 +9,7 @@ backbone_presets = {
  "params": 27092736,
  "path": "moonshine",
  },
- "kaggle_handle": "kaggle://keras/moonshine/Keras/moonshine_tiny_en",
+ "kaggle_handle": "kaggle://keras/moonshine/Keras/moonshine_tiny_en/1",
  },
  "moonshine_base_en": {
  "metadata": {
@@ -20,6 +20,6 @@ backbone_presets = {
  "params": 61513920,
  "path": "moonshine",
  },
- "kaggle_handle": "kaggle://keras/moonshine/Keras/moonshine_base_en",
+ "kaggle_handle": "kaggle://keras/moonshine/Keras/moonshine_base_en/1",
  },
  }
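Both Moonshine handles gain an explicit `/1` suffix, the same pinning applied in the Mixtral and Qwen hunks. A hedged sketch of why this matters, assuming Kaggle-style handles where a missing trailing version resolves to the latest model version at download time:

```python
# Handle layout: kaggle://<owner>/<model>/<framework>/<variant>[/<version>].
# Pinning "/1" makes every install of this wheel fetch the same checkpoint.
handle = "kaggle://keras/moonshine/Keras/moonshine_base_en/1"
owner, model, framework, variant, version = (
    handle.removeprefix("kaggle://").split("/")
)
assert version == "1"  # pinned, rather than "latest at download time"
```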
keras_hub/src/models/pali_gemma/pali_gemma_presets.py CHANGED
@@ -81,7 +81,7 @@ backbone_presets = {
  "path": "pali_gemma2",
  "model_card": "https://www.kaggle.com/models/google/paligemma-2",
  },
- "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_ft_docci_10b_448/2",
+ "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_ft_docci_10b_448/3",
  },
  "pali_gemma2_mix_3b_224": {
  "metadata": {
@@ -126,7 +126,7 @@ backbone_presets = {
  "path": "pali_gemma2",
  "model_card": "https://www.kaggle.com/models/google/paligemma-2",
  },
- "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_mix_10b_224/2",
+ "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_mix_10b_224/3",
  },
  "pali_gemma2_mix_10b_448": {
  "metadata": {
@@ -141,7 +141,7 @@ backbone_presets = {
  "path": "pali_gemma2",
  "model_card": "https://www.kaggle.com/models/google/paligemma-2",
  },
- "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_mix_10b_448/2",
+ "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_mix_10b_448/3",
  },
  "pali_gemma2_mix_28b_224": {
  "metadata": {
@@ -156,7 +156,7 @@ backbone_presets = {
  "path": "pali_gemma2",
  "model_card": "https://www.kaggle.com/models/google/paligemma-2",
  },
- "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_28b_mix_224/2",
+ "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_28b_mix_224/3",
  },
  "pali_gemma2_mix_28b_448": {
  "metadata": {
@@ -171,7 +171,7 @@ backbone_presets = {
  "path": "pali_gemma2",
  "model_card": "https://www.kaggle.com/models/google/paligemma-2",
  },
- "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_28b_mix_448/2",
+ "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_28b_mix_448/3",
  },
  "pali_gemma2_pt_3b_224": {
  "metadata": {
@@ -231,7 +231,7 @@ backbone_presets = {
  "path": "pali_gemma2",
  "model_card": "https://www.kaggle.com/models/google/paligemma-2",
  },
- "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_pt_10b_224/2",
+ "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_pt_10b_224/3",
  },
  "pali_gemma2_pt_10b_448": {
  "metadata": {
@@ -246,7 +246,7 @@ backbone_presets = {
  "path": "pali_gemma2",
  "model_card": "https://www.kaggle.com/models/google/paligemma-2",
  },
- "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_pt_10b_448/2",
+ "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_pt_10b_448/3",
  },
  "pali_gemma2_pt_10b_896": {
  "metadata": {
@@ -261,7 +261,7 @@ backbone_presets = {
  "path": "pali_gemma2",
  "model_card": "https://www.kaggle.com/models/google/paligemma-2",
  },
- "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_pt_10b_896/2",
+ "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_pt_10b_896/3",
  },
  "pali_gemma2_pt_28b_224": {
  "metadata": {
@@ -276,7 +276,7 @@ backbone_presets = {
  "path": "pali_gemma2",
  "model_card": "https://www.kaggle.com/models/google/paligemma-2",
  },
- "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_pt_28b_224/3",
+ "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_pt_28b_224/4",
  },
  "pali_gemma2_pt_28b_448": {
  "metadata": {
@@ -291,7 +291,7 @@ backbone_presets = {
  "path": "pali_gemma2",
  "model_card": "https://www.kaggle.com/models/google/paligemma-2",
  },
- "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_pt_28b_448/2",
+ "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_pt_28b_448/3",
  },
  "pali_gemma2_pt_28b_896": {
  "metadata": {
@@ -306,6 +306,6 @@ backbone_presets = {
  "path": "pali_gemma2",
  "model_card": "https://www.kaggle.com/models/google/paligemma-2",
  },
- "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_pt_28b_896/2",
+ "kaggle_handle": "kaggle://keras/paligemma2/keras/pali_gemma2_pt_28b_896/3",
  },
  }
keras_hub/src/models/qwen/qwen_presets.py CHANGED
@@ -7,7 +7,7 @@ backbone_presets = {
  "params": 494032768,
  "path": "qwen",
  },
- "kaggle_handle": "kaggle://keras/qwen/keras/qwen2.5_0.5b_en",
+ "kaggle_handle": "kaggle://keras/qwen/keras/qwen2.5_0.5b_en/1",
  },
  "qwen2.5_3b_en": {
  "metadata": {
@@ -15,7 +15,7 @@ backbone_presets = {
  "params": 3085938688,
  "path": "qwen",
  },
- "kaggle_handle": "kaggle://keras/qwen/keras/qwen2.5_3b_en",
+ "kaggle_handle": "kaggle://keras/qwen/keras/qwen2.5_3b_en/1",
  },
  "qwen2.5_7b_en": {
  "metadata": {
@@ -23,7 +23,7 @@ backbone_presets = {
  "params": 6993420288,
  "path": "qwen",
  },
- "kaggle_handle": "kaggle://keras/qwen/keras/qwen2.5_7b_en/2",
+ "kaggle_handle": "kaggle://keras/qwen/keras/qwen2.5_7b_en/3",
  },
  "qwen2.5_instruct_0.5b_en": {
  "metadata": {
@@ -34,7 +34,7 @@ backbone_presets = {
  "params": 494032768,
  "path": "qwen",
  },
- "kaggle_handle": "kaggle://keras/qwen/keras/qwen2.5_instruct_0.5b_en",
+ "kaggle_handle": "kaggle://keras/qwen/keras/qwen2.5_instruct_0.5b_en/1",
  },
  "qwen2.5_instruct_32b_en": {
  "metadata": {
@@ -45,7 +45,7 @@ backbone_presets = {
  "params": 32763876352,
  "path": "qwen",
  },
- "kaggle_handle": "kaggle://keras/qwen/keras/qwen2.5_instruct_32b_en",
+ "kaggle_handle": "kaggle://keras/qwen/keras/qwen2.5_instruct_32b_en/2",
  },
  "qwen2.5_instruct_72b_en": {
  "metadata": {
@@ -56,6 +56,6 @@ backbone_presets = {
  "params": 72706203648,
  "path": "qwen",
  },
- "kaggle_handle": "kaggle://keras/qwen/keras/qwen2.5_instruct_72b_en",
+ "kaggle_handle": "kaggle://keras/qwen/keras/qwen2.5_instruct_72b_en/2",
  },
  }
keras_hub/src/models/qwen_moe/qwen_moe_presets.py CHANGED
@@ -10,6 +10,6 @@ backbone_presets = {
  "params": 14315784192,
  "path": "qwen-1.5-moe",
  },
- "kaggle_handle": "kaggle://keras/qwen-1.5-moe/Keras/qwen1.5_moe_2.7b_en",
+ "kaggle_handle": "kaggle://keras/qwen-1.5-moe/Keras/qwen1.5_moe_2.7b_en/3",
  },
  }
keras_hub/src/version.py CHANGED
@@ -1,7 +1,7 @@
  from keras_hub.src.api_export import keras_hub_export

  # Unique source of truth for the version number.
- __version__ = "0.21.0.dev202505220409"
+ __version__ = "0.21.0.dev202505240409"


  @keras_hub_export("keras_hub.version")
keras_hub_nightly-0.21.0.dev202505220409.dist-info/METADATA → keras_hub_nightly-0.21.0.dev202505240409.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: keras-hub-nightly
- Version: 0.21.0.dev202505220409
+ Version: 0.21.0.dev202505240409
  Summary: Pretrained models for Keras.
  Author-email: Keras team <keras-users@googlegroups.com>
  License-Expression: Apache-2.0
keras_hub_nightly-0.21.0.dev202505220409.dist-info/RECORD → keras_hub_nightly-0.21.0.dev202505240409.dist-info/RECORD RENAMED
@@ -5,7 +5,7 @@ keras_hub/models/__init__.py,sha256=itSzodVUeuX6HQnmsSXY0Wv-5Htbu397410R-SFW_4I,
  keras_hub/samplers/__init__.py,sha256=aFQIkiqbZpi8vjrPp2MVII4QUfE-eQjra5fMeHsoy7k,886
  keras_hub/src/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  keras_hub/src/api_export.py,sha256=9pQZK27JObxWZ96QPLBp1OBsjWigh1iuV6RglPGMRk0,1499
- keras_hub/src/version.py,sha256=ZWHai9U-yJxL-dj1yBgjl16y6XtOeP2SreCCjSf9xgA,222
+ keras_hub/src/version.py,sha256=AnU8tBqSqSoLY34F6O-fFt47PXgrGHUbqORa6_sXy6w,222
  keras_hub/src/layers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  keras_hub/src/layers/modeling/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  keras_hub/src/layers/modeling/alibi_bias.py,sha256=1XBTHI52L_iJDhN_w5ydu_iMhCuTgQAxEPwcLA6BPuk,4411
@@ -41,6 +41,8 @@ keras_hub/src/metrics/rouge_base.py,sha256=Pt2DUznhTTeR-fX1nQ_wSbPtmuTgxQTvrGpu8
  keras_hub/src/metrics/rouge_l.py,sha256=JlZhMBV6wS_6zMd57pkTc6yxHkEJT9fVQMlPZKekQzQ,2729
  keras_hub/src/metrics/rouge_n.py,sha256=JoFtmgjF4Ic263ny6bfD6vMHKreH9le3HnOOxemupRc,3620
  keras_hub/src/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ keras_hub/src/models/audio_to_text.py,sha256=XoOjXtKBX6K1fz-zOXcdVo3FpjuxCMnJZh2LQcYXb_0,2726
+ keras_hub/src/models/audio_to_text_preprocessor.py,sha256=GS-WWyJ6aSsPRxi_0bxvxA00h2mT2FEwSdAoQXAUYVI,3249
  keras_hub/src/models/backbone.py,sha256=KS2x3HFWKhEYhroUFT3uZgSkeW_48zPGqUNvxCDDIQQ,11534
  keras_hub/src/models/causal_lm.py,sha256=ReaF-i3SHsCkHh4c28jM72QjMQ8x7yiCwG39FRb-7KE,16786
  keras_hub/src/models/causal_lm_preprocessor.py,sha256=YY7VJZicdmnjDSWi9g4_pEpd5bdJK166GlWcapvokF0,6663
@@ -189,7 +191,7 @@ keras_hub/src/models/gemma/gemma_backbone.py,sha256=GzAUSArw_pN9dtWQzTVhWDbW-XyW
  keras_hub/src/models/gemma/gemma_causal_lm.py,sha256=3OXaIXlrKqMIuUnBk-bUz-0SYFL-XkkQTWm8qRY2YII,16770
  keras_hub/src/models/gemma/gemma_causal_lm_preprocessor.py,sha256=bpKkEurWIfa6Kp9s4pz84-sBDSA6ZFNHP8nXG1fFQrg,2912
  keras_hub/src/models/gemma/gemma_decoder_block.py,sha256=f5UsRO-VNsKJfm_WHVJWK4UahhzYm3sKprJ8jjr-zm4,7628
- keras_hub/src/models/gemma/gemma_presets.py,sha256=lWPjEb_6pFC1vdX7mwxf-C2im93YygmlSPjWvqnLWic,7178
+ keras_hub/src/models/gemma/gemma_presets.py,sha256=ZOZEZP3MaIn4-y5i0-QxNeAVtNoWvVYTAu96wvIFMpA,7178
  keras_hub/src/models/gemma/gemma_tokenizer.py,sha256=FhcyNL4lo63MqOhTQPFr07-u3BddL0fVM4TmOm8ku-I,2622
  keras_hub/src/models/gemma/rms_normalization.py,sha256=fku-JEo2sNy-ytX7ySD1sRzdhRAPmYex_z8oFk1NiG8,833
  keras_hub/src/models/gemma3/__init__.py,sha256=oPFadkdK5DRLD6sYx83iTetY5daWuSzmJilLjokHcbU,257
@@ -200,7 +202,7 @@ keras_hub/src/models/gemma3/gemma3_causal_lm_preprocessor.py,sha256=vjt4N-zr0Eb5
  keras_hub/src/models/gemma3/gemma3_decoder_block.py,sha256=6PLlpDxxF67stDv74fw9nNgUHBWmTLx6qGygJwyu5FY,10819
  keras_hub/src/models/gemma3/gemma3_image_converter.py,sha256=czi5JrTyKiK0nFzvonviBIX8jjvLHqvGNA9RyheB31k,536
  keras_hub/src/models/gemma3/gemma3_interleave_embeddings.py,sha256=_Q5hvhA93HAJe-A2IBRKVu0_RDVht61lFQiYse_9Rm4,4597
- keras_hub/src/models/gemma3/gemma3_presets.py,sha256=--V8AV9VLk8GJ9JmyVAfZfSPe_d_p0L60bdyED04ig0,5124
+ keras_hub/src/models/gemma3/gemma3_presets.py,sha256=tVxug3rX3w_lqZlFfyqUlVdOrfBjN0GJY5ooBx1Fe0M,5124
  keras_hub/src/models/gemma3/gemma3_tokenizer.py,sha256=ZaBclFIwzJkSXDuZMBQLHUKV8RWEdZ_dsJMvMcc3qXw,3215
  keras_hub/src/models/gemma3/gemma3_vision_encoder.py,sha256=7XI0oBjIfJItV5w90t5bWb3C2KzjhvDnIC7wjIq4Cns,20850
  keras_hub/src/models/gemma3/rms_normalization.py,sha256=fku-JEo2sNy-ytX7ySD1sRzdhRAPmYex_z8oFk1NiG8,833
@@ -225,14 +227,14 @@ keras_hub/src/models/llama/llama_causal_lm.py,sha256=9bP4-XDCMgsZuH1ILIMzmwq2Fyy
  keras_hub/src/models/llama/llama_causal_lm_preprocessor.py,sha256=VTboOMiRBoxHrwP343upLUTsv3AG65r2H8h_PNPVphE,3047
  keras_hub/src/models/llama/llama_decoder.py,sha256=CfWI8ru1-uWjDs0sL6H7g8ElYXWu6h7c5XIx-2Y8lX8,9668
  keras_hub/src/models/llama/llama_layernorm.py,sha256=LfRbePHUJs00Ptf7dvNaw3Aj9n1xBMBpE_rS5zzsYMo,1050
- keras_hub/src/models/llama/llama_presets.py,sha256=k0JPQggSQ0XUkhiPlfM0gTqHXGOt39InVLglPUi4AJU,1902
+ keras_hub/src/models/llama/llama_presets.py,sha256=B-WwL4g0Oiml1pyVwQrfIwvjm8jyLlBvwEE-KvkXayU,1902
  keras_hub/src/models/llama/llama_rotary_embedding.py,sha256=nqQGl7lFXJq7xGBfoONx2-wuuvKdoydnzUjy6FGQjwo,7300
  keras_hub/src/models/llama/llama_tokenizer.py,sha256=NKWhxTutQ2jd6sd3NSTy9plQyKGCmuNG7U6kVxhZU4Y,1981
  keras_hub/src/models/llama3/__init__.py,sha256=Vqvr2E10cnANkrRQGNBJtVLNAu-Bg9Lx6sqKOZWFy_8,257
  keras_hub/src/models/llama3/llama3_backbone.py,sha256=TEocD8X7GihQFGJAz3jPwLCqDb86nyeZ1DqBF7RgQLE,3366
  keras_hub/src/models/llama3/llama3_causal_lm.py,sha256=qk_onuf7S6d7rxAntilq2Q2orggMbPEJbNHJNVe2G0U,1541
  keras_hub/src/models/llama3/llama3_causal_lm_preprocessor.py,sha256=twbXel9hsQgGxDAoQhEQuVm2udnEybI4fAQTJzXAuBs,3064
- keras_hub/src/models/llama3/llama3_presets.py,sha256=--_6Uao-fK4xD4ShgsqzKmlyQPyO9tRkF0VDYKjGpNw,4302
+ keras_hub/src/models/llama3/llama3_presets.py,sha256=m5WEWOKm58wp7w_SDhYFVv3XhdY6d0GfSwxlbH07rwM,4302
  keras_hub/src/models/llama3/llama3_tokenizer.py,sha256=J-KxRc08vGs4olFw_4mtJs0W_dTeUyj_XxMycazBmxI,1934
  keras_hub/src/models/mistral/__init__.py,sha256=vjBlzcrIsFSwJKnfwfTNMKstIEKGFTE3kVcdAdfwlnE,263
  keras_hub/src/models/mistral/mistral_attention.py,sha256=nGDlD4NcIwIGlfbt3ArxdT5QAvamY7yiNEGDlTgWirU,8609
@@ -240,7 +242,7 @@ keras_hub/src/models/mistral/mistral_backbone.py,sha256=oatoqSX0z-xjKfXeSveL4P0D
  keras_hub/src/models/mistral/mistral_causal_lm.py,sha256=ujCKfsbuYzr8VusqPYcnTH6rTb0MRfzsinEraVhQksc,13234
  keras_hub/src/models/mistral/mistral_causal_lm_preprocessor.py,sha256=_4qq-uKktfIg_i081ZWjZGEIYZpedBwtBGpchQQ-qEk,3079
  keras_hub/src/models/mistral/mistral_layer_norm.py,sha256=nimMZ5CTPK8v9eflfrGuzqmv-2vd2rGlPvcHOMwYZyg,1063
- keras_hub/src/models/mistral/mistral_presets.py,sha256=76Cctnl-UXFtl76OFzMl7Q0E-oJuizbpIHoDlYA1pBI,939
+ keras_hub/src/models/mistral/mistral_presets.py,sha256=ggWQwKGDMFPzUWkQIJ6Tlk7NS-dClRO95WoSTaImL9s,939
  keras_hub/src/models/mistral/mistral_tokenizer.py,sha256=wyzR_Y2XwrDiBV3jIeBChSPiaOkVVaxFuLxMH2F6EYA,2005
  keras_hub/src/models/mistral/mistral_transformer_decoder.py,sha256=z5FCh9TEaznvhW3JOSKmFTotRbiuQhzJTZClW2m9sEw,9556
  keras_hub/src/models/mit/__init__.py,sha256=F70_0PR_nPzPdMI8XOpXDRR_nxclGjcHv3iWSWUX3w8,316
@@ -256,7 +258,7 @@ keras_hub/src/models/mixtral/mixtral_causal_lm.py,sha256=JA1t6xTeaYX_fNo9ftRyvzd
  keras_hub/src/models/mixtral/mixtral_causal_lm_preprocessor.py,sha256=q2qXa9QAUWBvOWv9DeNvwsBNXSORJAbQFoQsWQ7e8V8,3079
  keras_hub/src/models/mixtral/mixtral_decoder.py,sha256=CvOjhTxPnGQ_HNknZXRI6Cx1kpuHG99_TiOh-mNcsDw,18190
  keras_hub/src/models/mixtral/mixtral_layer_norm.py,sha256=zfbDKZEb45FTwP0zQd7WPPp8tuiGoSNfS-DRYWkZyWw,1031
- keras_hub/src/models/mixtral/mixtral_presets.py,sha256=XRwOkSDzZ8h42Onhf9AYh3DS_IxAQGICISiPLW_e5po,852
+ keras_hub/src/models/mixtral/mixtral_presets.py,sha256=AteLrYXyVjooz_DHLnBA1OMlZS6LMu7Y7gGUWddn6go,856
  keras_hub/src/models/mixtral/mixtral_tokenizer.py,sha256=Kc233k879QMyX164X_CzWbqpnqEkKWNqa648guTGkBk,661
  keras_hub/src/models/mobilenet/__init__.py,sha256=hxkNGGj_iAMu62iooUDEPA818sNOIgjG7pXMLEMOsAE,275
  keras_hub/src/models/mobilenet/mobilenet_backbone.py,sha256=aZBSFeLUObYYoi3od9DI1KfgPCqh5GHTcAI8Y2ZHShA,29536
@@ -265,16 +267,16 @@ keras_hub/src/models/mobilenet/mobilenet_image_classifier_preprocessor.py,sha256
  keras_hub/src/models/mobilenet/mobilenet_image_converter.py,sha256=a3Ka0UYYK5wHSOjf2oMHSgofRazTAeUfttklVefq14w,360
  keras_hub/src/models/mobilenet/mobilenet_presets.py,sha256=--nhaM6LmaiCtQlZPDwoQTHW7ciU0igzS4f9ssdD9Lo,1903
  keras_hub/src/models/mobilenet/util.py,sha256=S7j4UacmVIJ3fU8cymyAoK49eHcpWIKTOyUQiEjcbzQ,721
- keras_hub/src/models/moonshine/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ keras_hub/src/models/moonshine/__init__.py,sha256=WK_9Cy1dp5KplNAaTsaJbd-2DGLsiHQsIL5ZnXuCbDQ,275
  keras_hub/src/models/moonshine/moonshine_audio_converter.py,sha256=FnvR7SP44uVOsA3g9azUhQjsVg809eJ5nqoJZQ-DAq0,11854
- keras_hub/src/models/moonshine/moonshine_audio_to_text.py,sha256=295kTM-XfUqb5mYjVSApKzMGPtnRyQdwynqqcPS7a_M,15860
- keras_hub/src/models/moonshine/moonshine_audio_to_text_preprocessor.py,sha256=TZPvyMcPJ4Ojjv7r6ZUeafssIIVtFvPPzdiRHkK9O_A,10002
+ keras_hub/src/models/moonshine/moonshine_audio_to_text.py,sha256=dXFtjaxL1jpcIAiiZY1-kcNL-S4RiRJiAC2uR_a3Fyc,15865
+ keras_hub/src/models/moonshine/moonshine_audio_to_text_preprocessor.py,sha256=hTw941ww8cJrP5DRrxv2DtZUNLJ9A3cayFhnsG5Ef4g,10016
  keras_hub/src/models/moonshine/moonshine_backbone.py,sha256=XtRUBe_VusXsFRk7-t1JNXM0lxp2UBOJk9v7gfTNDhA,19623
  keras_hub/src/models/moonshine/moonshine_decoder.py,sha256=Exf5Gg1gsCBST53wxOgBetKkhjS8E8QIUIlUwHlOkIY,11816
  keras_hub/src/models/moonshine/moonshine_encoder.py,sha256=NjjMO_FEBlWFSv6Appv8a3V7XovW2afvxxjXwQRgV60,8148
  keras_hub/src/models/moonshine/moonshine_layers.py,sha256=EIiIMz-UK1nikrC7iusGqjb3jcvmu6VdNcnhWAQHs_M,9538
  keras_hub/src/models/moonshine/moonshine_multi_head_attention.py,sha256=YaxWxdywUyOQDW-KSX9DqXkX0ttGL-p1hRtWuAnlMaE,13598
- keras_hub/src/models/moonshine/moonshine_presets.py,sha256=p0_U8K86RDZlrW2Sd4O_7KFespgZ-Da1raDf9PUXXMw,872
+ keras_hub/src/models/moonshine/moonshine_presets.py,sha256=oqawiALSEwZVUhGejyprF4r1009k8930bz3EWJ6YpU8,876
  keras_hub/src/models/moonshine/moonshine_tokenizer.py,sha256=grD-x4hMZDJYEyxvCyV-FYvUFInYsUI08-vnBKLAl5Y,2215
  keras_hub/src/models/opt/__init__.py,sha256=6Ybj8etxNaPsVcuZvaeHnKB3As92Px--dbiFAqOCIT0,239
  keras_hub/src/models/opt/opt_backbone.py,sha256=mK5z_E5mSiIX5s0w4hr4IVQpT7K46W2ajZBmuMjxwaY,5873
@@ -288,7 +290,7 @@ keras_hub/src/models/pali_gemma/pali_gemma_causal_lm.py,sha256=AViEs6YltUqWnIVo7
  keras_hub/src/models/pali_gemma/pali_gemma_causal_lm_preprocessor.py,sha256=F57y0fZ0wYYxfGIjfrJc1W9uQpViYFx5bvFjj5CqUbI,4814
  keras_hub/src/models/pali_gemma/pali_gemma_decoder_block.py,sha256=24ABQ1vGlppV-KfWh0YqJjzM_Lu2GIwvyJ4X2XXie_A,5616
  keras_hub/src/models/pali_gemma/pali_gemma_image_converter.py,sha256=5yM_jUtrFsWIieiwfFBoP7mtPmQAwywkeLKbd7fhmzk,371
- keras_hub/src/models/pali_gemma/pali_gemma_presets.py,sha256=zF04iShXky_c3IfUbmLlBN2FYb6iCWH1DWTgDdTCqrI,13006
+ keras_hub/src/models/pali_gemma/pali_gemma_presets.py,sha256=DAaSzquR4_AnSjToDjgXj2zbrT5skUpXmzKoyATwwHk,13006
  keras_hub/src/models/pali_gemma/pali_gemma_tokenizer.py,sha256=ljTiADHo0Ok88q-jVzwJIle2C8xcxnudLTsBLzIySaM,2415
  keras_hub/src/models/pali_gemma/pali_gemma_vit.py,sha256=SbWanwCoONSwgiWQsc6lFdvhqKZ-zDW42XzQt8CNMtU,18311
  keras_hub/src/models/phi3/__init__.py,sha256=zIbf1MU-ks91mEkjTRJAsk51N3BBnXDF2JM1vO-13PQ,245
@@ -308,7 +310,7 @@ keras_hub/src/models/qwen/qwen_causal_lm.py,sha256=_f-UHaKHp0ncxknpkpEJiW3jlng3E
  keras_hub/src/models/qwen/qwen_causal_lm_preprocessor.py,sha256=Va-4TLJD3ycEnkS41rF3dVj4_6K0j-gxLTrREFRcyr0,609
  keras_hub/src/models/qwen/qwen_decoder.py,sha256=utmAvZlU7_nP-6pjGPDinK4JaMzsQSwOARG0ote-jAg,11771
  keras_hub/src/models/qwen/qwen_layernorm.py,sha256=DS35r3qd6g5ocL7Nhf_vNzLLMo1aI9VCSmL64dgNOYI,924
- keras_hub/src/models/qwen/qwen_presets.py,sha256=_jRG7bB4yBGWteBLbK2elc1e9doRl8zdzQRZgxFvnfc,1988
+ keras_hub/src/models/qwen/qwen_presets.py,sha256=DpRplWNwktM4KDgIP495PTUBJxQE_mS6KQSK5LGWOyc,1998
  keras_hub/src/models/qwen/qwen_tokenizer.py,sha256=LCv3IyiDDHqVnM9N3lf5-BE3iwicIh0nKS1hjoPw9lE,1532
  keras_hub/src/models/qwen_moe/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  keras_hub/src/models/qwen_moe/qwen_moe_attention.py,sha256=pE79_iHUm2LGkoWL6zMJw_pNfzIvmyq3yJaiq47W2TY,13242
@@ -317,7 +319,7 @@ keras_hub/src/models/qwen_moe/qwen_moe_causal_lm.py,sha256=MeP60v7GcN_SmH5_ULRpq
  keras_hub/src/models/qwen_moe/qwen_moe_causal_lm_preprocessor.py,sha256=uKaXRrJs02vkVudjdehzJPp0B84tPMkxNHlp166kceE,589
  keras_hub/src/models/qwen_moe/qwen_moe_decoder.py,sha256=kmUjLpYTbJQ3J_31qWhLOd0Dg2_9cl_JX_zM8ZMH1Qo,23130
  keras_hub/src/models/qwen_moe/qwen_moe_layernorm.py,sha256=DbkWJo7U0-cwdZwHPeAnFznYwtao6o0fjpoDJ9UWnpc,927
- keras_hub/src/models/qwen_moe/qwen_moe_presets.py,sha256=gzNtpVValNi1tjHWa-OHw6BChys9cEJk98aP56zyyUg,455
+ keras_hub/src/models/qwen_moe/qwen_moe_presets.py,sha256=uKrA9xLV3P3jtYUUsqdhKq_HPkB4lXmOYseB1wXTZnI,457
  keras_hub/src/models/qwen_moe/qwen_moe_tokenizer.py,sha256=2c3X8jNGO0q0UL5NtUqSgHWLqhyJGi2ohNcTeOGhd84,1407
  keras_hub/src/models/resnet/__init__.py,sha256=C5UqlQ6apm8WSp1bnrxB6Bi3BGaknxRQs-r3b2wpaGA,257
  keras_hub/src/models/resnet/resnet_backbone.py,sha256=Q7nlqcTXZzjqd0e-DsjHC4ok58yOX7qxseotym3uZpM,31276
@@ -499,7 +501,7 @@ keras_hub/src/utils/transformers/preset_loader.py,sha256=1nfS5xVsl-JROGXJXltTqV1
  keras_hub/src/utils/transformers/safetensor_utils.py,sha256=CYUHyA4y-B61r7NDnCsFb4t_UmSwZ1k9L-8gzEd6KRg,3339
  keras_hub/tokenizers/__init__.py,sha256=uMjjm0mzUkRb0e4Ac_JK8aJ9cKGUi5UqmzWoWAFJprE,4164
  keras_hub/utils/__init__.py,sha256=jXPqVGBpJr_PpYmqD8aDG-fRMlxH-ulqCR2SZMn288Y,646
- keras_hub_nightly-0.21.0.dev202505220409.dist-info/METADATA,sha256=EqRkCDIuHYBX4sLxSObub9YnmlNwhf_d2-IKG1tm4Xw,7393
- keras_hub_nightly-0.21.0.dev202505220409.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
- keras_hub_nightly-0.21.0.dev202505220409.dist-info/top_level.txt,sha256=N4J6piIWBKa38A4uV-CnIopnOEf8mHAbkNXafXm_CuA,10
- keras_hub_nightly-0.21.0.dev202505220409.dist-info/RECORD,,
+ keras_hub_nightly-0.21.0.dev202505240409.dist-info/METADATA,sha256=BJHRD68RtZc8CA6kIFWZxphjYr6g2t62j1FvwLar_LU,7393
+ keras_hub_nightly-0.21.0.dev202505240409.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+ keras_hub_nightly-0.21.0.dev202505240409.dist-info/top_level.txt,sha256=N4J6piIWBKa38A4uV-CnIopnOEf8mHAbkNXafXm_CuA,10
+ keras_hub_nightly-0.21.0.dev202505240409.dist-info/RECORD,,