keras-hub-nightly 0.23.0.dev202509150421__py3-none-any.whl → 0.23.0.dev202509170415__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


keras_hub/src/models/clip/clip_presets.py CHANGED
@@ -11,7 +11,7 @@ backbone_presets = {
             "params": 149620934,
             "path": "clip",
         },
-        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_base_patch16/2",
+        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_base_patch16/3",
     },
     "clip_vit_base_patch32": {
         "metadata": {
@@ -22,7 +22,7 @@ backbone_presets = {
             "params": 151277363,
             "path": "clip",
         },
-        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_base_patch32/2",
+        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_base_patch32/3",
     },
     "clip_vit_large_patch14": {
         "metadata": {
@@ -33,7 +33,7 @@ backbone_presets = {
             "params": 427616770,
             "path": "clip",
         },
-        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_large_patch14/2",
+        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_large_patch14/3",
     },
     "clip_vit_large_patch14_336": {
         "metadata": {
@@ -44,7 +44,7 @@ backbone_presets = {
             "params": 427944770,
             "path": "clip",
         },
-        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_large_patch14_336/2",
+        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_large_patch14_336/3",
     },
     "clip_vit_b_32_laion2b_s34b_b79k": {
         "metadata": {
@@ -55,7 +55,7 @@ backbone_presets = {
             "params": 151277363,
             "path": "clip",
         },
-        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_b_32_laion2b_s34b_b79k/2",
+        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_b_32_laion2b_s34b_b79k/3",
     },
     "clip_vit_h_14_laion2b_s32b_b79k": {
         "metadata": {
@@ -66,7 +66,7 @@ backbone_presets = {
             "params": 986109698,
             "path": "clip",
         },
-        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_h_14_laion2b_s32b_b79k/2",
+        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_h_14_laion2b_s32b_b79k/3",
     },
     "clip_vit_g_14_laion2b_s12b_b42k": {
         "metadata": {
@@ -77,7 +77,7 @@ backbone_presets = {
             "params": 1366678530,
             "path": "clip",
         },
-        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_g_14_laion2b_s12b_b42k/2",
+        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_g_14_laion2b_s12b_b42k/3",
     },
     "clip_vit_bigg_14_laion2b_39b_b160k": {
         "metadata": {
@@ -88,6 +88,6 @@ backbone_presets = {
             "params": 2539567362,
             "path": "clip",
         },
-        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_bigg_14_laion2b_39b_b160k/2",
+        "kaggle_handle": "kaggle://keras/clip/keras/clip_vit_bigg_14_laion2b_39b_b160k/3",
     },
 }
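The net effect of the CLIP change is that every preset's kaggle_handle now resolves to version 3 of the hosted weights, so a fresh download of any CLIP preset picks up the re-uploaded checkpoints. A minimal sketch of the user-side behavior, assuming the standard KerasHub preset API (only the preset name comes from the table above):

import keras_hub

# Resolves the "clip_vit_base_patch16" entry registered above; its handle
# now points at kaggle://keras/clip/keras/clip_vit_base_patch16/3.
backbone = keras_hub.models.Backbone.from_preset("clip_vit_base_patch16")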
keras_hub/src/models/d_fine/__init__.py CHANGED
@@ -0,0 +1,5 @@
+from keras_hub.src.models.d_fine.d_fine_backbone import DFineBackbone
+from keras_hub.src.models.d_fine.d_fine_presets import backbone_presets
+from keras_hub.src.utils.preset_utils import register_presets
+
+register_presets(backbone_presets, DFineBackbone)
keras_hub/src/models/d_fine/d_fine_presets.py CHANGED
@@ -1,2 +1,155 @@
 # Metadata for loading pretrained model weights.
-backbone_presets = {}
+backbone_presets = {
+    "dfine_nano_coco": {
+        "metadata": {
+            "description": (
+                "D-FINE Nano model, the smallest variant in the family, "
+                "pretrained on the COCO dataset. Ideal for applications "
+                "where computational resources are limited."
+            ),
+            "params": 3788625,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_nano_coco/1",
+    },
+    "dfine_small_coco": {
+        "metadata": {
+            "description": (
+                "D-FINE Small model pretrained on the COCO dataset. Offers a "
+                "balance between performance and computational efficiency."
+            ),
+            "params": 10329321,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_small_coco/1",
+    },
+    "dfine_medium_coco": {
+        "metadata": {
+            "description": (
+                "D-FINE Medium model pretrained on the COCO dataset. A solid "
+                "baseline with strong performance for general-purpose "
+                "object detection."
+            ),
+            "params": 19621160,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_medium_coco/1",
+    },
+    "dfine_large_coco": {
+        "metadata": {
+            "description": (
+                "D-FINE Large model pretrained on the COCO dataset. Provides "
+                "high accuracy and is suitable for more demanding tasks."
+            ),
+            "params": 31344064,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_large_coco/1",
+    },
+    "dfine_xlarge_coco": {
+        "metadata": {
+            "description": (
+                "D-FINE X-Large model, the largest COCO-pretrained variant, "
+                "designed for state-of-the-art performance where accuracy "
+                "is the top priority."
+            ),
+            "params": 62834048,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_xlarge_coco/1",
+    },
+    "dfine_small_obj365": {
+        "metadata": {
+            "description": (
+                "D-FINE Small model pretrained on the large-scale Objects365 "
+                "dataset, enhancing its ability to recognize a wider "
+                "variety of objects."
+            ),
+            "params": 10623329,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_small_obj365/1",
+    },
+    "dfine_medium_obj365": {
+        "metadata": {
+            "description": (
+                "D-FINE Medium model pretrained on the Objects365 dataset. "
+                "Benefits from a larger and more diverse pretraining corpus."
+            ),
+            "params": 19988670,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_medium_obj365/1",
+    },
+    "dfine_large_obj365": {
+        "metadata": {
+            "description": (
+                "D-FINE Large model pretrained on the Objects365 dataset for "
+                "improved generalization and performance on diverse object "
+                "categories."
+            ),
+            "params": 31858578,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_large_obj365/1",
+    },
+    "dfine_xlarge_obj365": {
+        "metadata": {
+            "description": (
+                "D-FINE X-Large model pretrained on the Objects365 dataset, "
+                "offering maximum performance by leveraging a vast number "
+                "of object categories during pretraining."
+            ),
+            "params": 63348562,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_xlarge_obj365/1",
+    },
+    "dfine_small_obj2coco": {
+        "metadata": {
+            "description": (
+                "D-FINE Small model first pretrained on Objects365 and then "
+                "fine-tuned on COCO, combining broad feature learning with "
+                "benchmark-specific adaptation."
+            ),
+            "params": 10329321,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_small_obj2coco/1",
+    },
+    "dfine_medium_obj2coco": {
+        "metadata": {
+            "description": (
+                "D-FINE Medium model using a two-stage training process: "
+                "pretraining on Objects365 followed by fine-tuning on COCO."
+            ),
+            "params": 19621160,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_medium_obj2coco/1",
+    },
+    "dfine_large_obj2coco_e25": {
+        "metadata": {
+            "description": (
+                "D-FINE Large model pretrained on Objects365 and then "
+                "fine-tuned on COCO for 25 epochs. A high-performance model "
+                "with specialized tuning."
+            ),
+            "params": 31344064,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_large_obj2coco_e25/1",
+    },
+    "dfine_xlarge_obj2coco": {
+        "metadata": {
+            "description": (
+                "D-FINE X-Large model, pretrained on Objects365 and fine-tuned "
+                "on COCO, representing the most powerful model in this "
+                "series for COCO-style tasks."
+            ),
+            "params": 62834048,
+            "path": "d_fine",
+        },
+        "kaggle_handle": "kaggle://keras/d-fine/keras/dfine_xlarge_obj2coco/1",
+    },
+}
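Because the new d_fine/__init__.py registers this table against DFineBackbone, every entry becomes loadable by name. A minimal sketch, assuming the standard KerasHub from_preset flow; the import path and preset name are taken verbatim from the diff above:

from keras_hub.src.models.d_fine.d_fine_backbone import DFineBackbone

# "dfine_nano_coco" is the smallest COCO-pretrained variant (~3.8M params)
# registered by the new d_fine/__init__.py.
backbone = DFineBackbone.from_preset("dfine_nano_coco")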
keras_hub/src/models/gemma/gemma_backbone.py CHANGED
@@ -114,7 +114,6 @@ class GemmaBackbone(Backbone):
                 scale=1.0,
                 mode="fan_in",
                 distribution="untruncated_normal",
-                seed=None,
             ),
             dtype=dtype,
             logit_soft_cap=final_logit_soft_cap,
keras_hub/src/models/gemma3/gemma3_backbone.py CHANGED
@@ -210,7 +210,6 @@ class Gemma3Backbone(Backbone):
                 scale=1.0,
                 mode="fan_in",
                 distribution="untruncated_normal",
-                seed=None,
             ),
             dtype=dtype,
             logit_soft_cap=final_logit_soft_cap,
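In both Gemma backbones the removed seed=None was already the default for this initializer, so the constructed embeddings initializer is unchanged in effect. A sketch with the standard Keras API, for illustration only:

import keras

# Equivalent to the previous call that passed seed=None explicitly:
# VarianceScaling's seed argument defaults to None.
initializer = keras.initializers.VarianceScaling(
    scale=1.0, mode="fan_in", distribution="untruncated_normal"
)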
keras_hub/src/models/t5gemma/t5gemma_presets.py CHANGED
@@ -1,5 +1,85 @@
 # Metadata for loading pretrained model weights.
 backbone_presets = {
+    "t5gemma_s_s_ul2": {
+        "metadata": {
+            "description": (
+                "T5Gemma S/S model with a small encoder and small decoder, "
+                "adapted as a UL2 model."
+            ),
+            "params": 312517632,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_s_s_ul2/1",
+    },
+    "t5gemma_s_s_prefixlm": {
+        "metadata": {
+            "description": (
+                "T5Gemma S/S model with a small encoder and small decoder, "
+                "adapted as a prefix language model."
+            ),
+            "params": 312517632,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_s_s_prefixlm/1",
+    },
+    "t5gemma_s_s_ul2_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma S/S model with a small encoder and small decoder, "
+                "adapted as a UL2 model and fine-tuned for instruction "
+                "following."
+            ),
+            "params": 312517632,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_s_s_ul2_it/1",
+    },
+    "t5gemma_s_s_prefixlm_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma S/S model with a small encoder and small decoder, "
+                "adapted as a prefix language model and fine-tuned for "
+                "instruction following."
+            ),
+            "params": 312517632,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_s_s_prefixlm_it/1",
+    },
+    "t5gemma_b_b_ul2": {
+        "metadata": {
+            "description": (
+                "T5Gemma B/B model with a base encoder and base decoder, "
+                "adapted as a UL2 model."
+            ),
+            "params": 591490560,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_b_b_ul2/1",
+    },
+    "t5gemma_b_b_prefixlm": {
+        "metadata": {
+            "description": (
+                "T5Gemma B/B model with a base encoder and base decoder, "
+                "adapted as a prefix language model."
+            ),
+            "params": 591490560,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_b_b_prefixlm/1",
+    },
+    "t5gemma_b_b_ul2_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma B/B model with a base encoder and base decoder, "
+                "adapted as a UL2 model and fine-tuned for instruction "
+                "following."
+            ),
+            "params": 591490560,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_b_b_ul2_it/1",
+    },
     "t5gemma_b_b_prefixlm_it": {
         "metadata": {
             "description": (
@@ -10,6 +90,285 @@ backbone_presets = {
             "params": 591490560,
             "path": "t5gemma",
         },
-        "kaggle_handle": "kaggle://harshaljanjani/t5gemma/keras/t5gemma_b_b_prefixlm_it",
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_b_b_prefixlm_it/1",
+    },
+    "t5gemma_l_l_ul2": {
+        "metadata": {
+            "description": (
+                "T5Gemma L/L model with a large encoder and large decoder, "
+                "adapted as a UL2 model."
+            ),
+            "params": 1241761792,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_l_l_ul2/1",
+    },
+    "t5gemma_l_l_prefixlm": {
+        "metadata": {
+            "description": (
+                "T5Gemma L/L model with a large encoder and large decoder, "
+                "adapted as a prefix language model."
+            ),
+            "params": 1241761792,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_l_l_prefixlm/1",
+    },
+    "t5gemma_l_l_ul2_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma L/L model with a large encoder and large decoder, "
+                "adapted as a UL2 model and fine-tuned for instruction "
+                "following."
+            ),
+            "params": 1241761792,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_l_l_ul2_it/1",
+    },
+    "t5gemma_l_l_prefixlm_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma L/L model with a large encoder and large decoder, "
+                "adapted as a prefix language model and fine-tuned for "
+                "instruction following."
+            ),
+            "params": 1241761792,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_l_l_prefixlm_it/1",
+    },
+    "t5gemma_ml_ml_ul2": {
+        "metadata": {
+            "description": (
+                "T5Gemma ML/ML model with a medium-large encoder and "
+                "medium-large decoder, adapted as a UL2 model."
+            ),
+            "params": 2200345344,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_ml_ml_ul2/1",
+    },
+    "t5gemma_ml_ml_prefixlm": {
+        "metadata": {
+            "description": (
+                "T5Gemma ML/ML model with a medium-large encoder and "
+                "medium-large decoder, adapted as a prefix language model."
+            ),
+            "params": 2200345344,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_ml_ml_prefixlm/1",
+    },
+    "t5gemma_ml_ml_ul2_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma ML/ML model with a medium-large encoder and "
+                "medium-large decoder, adapted as a UL2 model and fine-tuned "
+                "for instruction following."
+            ),
+            "params": 2200345344,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_ml_ml_ul2_it/1",
+    },
+    "t5gemma_ml_ml_prefixlm_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma ML/ML model with a medium-large encoder and "
+                "medium-large decoder, adapted as a prefix language model and "
+                "fine-tuned for instruction following."
+            ),
+            "params": 2200345344,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_ml_ml_prefixlm_it/1",
+    },
+    "t5gemma_xl_xl_ul2": {
+        "metadata": {
+            "description": (
+                "T5Gemma XL/XL model with an extra-large encoder and "
+                "extra-large decoder, adapted as a UL2 model."
+            ),
+            "params": 3766980608,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_xl_xl_ul2/1",
+    },
+    "t5gemma_xl_xl_prefixlm": {
+        "metadata": {
+            "description": (
+                "T5Gemma XL/XL model with an extra-large encoder and "
+                "extra-large decoder, adapted as a prefix language model."
+            ),
+            "params": 3766980608,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_xl_xl_prefixlm/1",
+    },
+    "t5gemma_xl_xl_ul2_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma XL/XL model with an extra-large encoder and "
+                "extra-large decoder, adapted as a UL2 model and fine-tuned "
+                "for instruction following."
+            ),
+            "params": 3766980608,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_xl_xl_ul2_it/1",
+    },
+    "t5gemma_xl_xl_prefixlm_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma XL/XL model with an extra-large encoder and "
+                "extra-large decoder, adapted as a prefix language model and "
+                "fine-tuned for instruction following."
+            ),
+            "params": 3766980608,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_xl_xl_prefixlm_it/1",
+    },
+    "t5gemma_2b_2b_ul2": {
+        "metadata": {
+            "description": (
+                "T5Gemma 2B/2B model with a 2-billion-parameter encoder and "
+                "2-billion-parameter decoder, adapted as a UL2 model."
+            ),
+            "params": 5596853760,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_2b_2b_ul2/1",
+    },
+    "t5gemma_2b_2b_prefixlm": {
+        "metadata": {
+            "description": (
+                "T5Gemma 2B/2B model with a 2-billion-parameter encoder and "
+                "2-billion-parameter decoder, adapted as a prefix language "
+                "model."
+            ),
+            "params": 5596853760,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_2b_2b_prefixlm/1",
+    },
+    "t5gemma_2b_2b_ul2_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma 2B/2B model with a 2-billion-parameter encoder and "
+                "2-billion-parameter decoder, adapted as a UL2 model and "
+                "fine-tuned for instruction following."
+            ),
+            "params": 5596853760,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_2b_2b_ul2_it/1",
+    },
+    "t5gemma_2b_2b_prefixlm_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma 2B/2B model with a 2-billion-parameter encoder and "
+                "2-billion-parameter decoder, adapted as a prefix language "
+                "model and fine-tuned for instruction following."
+            ),
+            "params": 5596853760,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_2b_2b_prefixlm_it/1",
+    },
+    "t5gemma_9b_2b_ul2": {
+        "metadata": {
+            "description": (
+                "T5Gemma 9B/2B model with a 9-billion-parameter encoder and "
+                "2-billion-parameter decoder, adapted as a UL2 model."
+            ),
+            "params": 12292375296,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_9b_2b_ul2/1",
+    },
+    "t5gemma_9b_2b_prefixlm": {
+        "metadata": {
+            "description": (
+                "T5Gemma 9B/2B model with a 9-billion-parameter encoder and "
+                "2-billion-parameter decoder, adapted as a prefix language "
+                "model."
+            ),
+            "params": 12292375296,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_9b_2b_prefixlm/1",
+    },
+    "t5gemma_9b_2b_ul2_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma 9B/2B model with a 9-billion-parameter encoder and "
+                "2-billion-parameter decoder, adapted as a UL2 model and "
+                "fine-tuned for instruction following."
+            ),
+            "params": 12292375296,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_9b_2b_ul2_it/1",
+    },
+    "t5gemma_9b_2b_prefixlm_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma 9B/2B model with a 9-billion-parameter encoder and "
+                "2-billion-parameter decoder, adapted as a prefix language "
+                "model and fine-tuned for instruction following."
+            ),
+            "params": 12292375296,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_9b_2b_prefixlm_it/1",
+    },
+    "t5gemma_9b_9b_ul2": {
+        "metadata": {
+            "description": (
+                "T5Gemma 9B/9B model with a 9-billion-parameter encoder and "
+                "9-billion-parameter decoder, adapted as a UL2 model."
+            ),
+            "params": 20333401088,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_9b_9b_ul2/1",
+    },
+    "t5gemma_9b_9b_prefixlm": {
+        "metadata": {
+            "description": (
+                "T5Gemma 9B/9B model with a 9-billion-parameter encoder and "
+                "9-billion-parameter decoder, adapted as a prefix language "
+                "model."
+            ),
+            "params": 20333401088,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_9b_9b_prefixlm/1",
+    },
+    "t5gemma_9b_9b_ul2_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma 9B/9B model with a 9-billion-parameter encoder and "
+                "9-billion-parameter decoder, adapted as a UL2 model and "
+                "fine-tuned for instruction following."
+            ),
+            "params": 20333401088,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_9b_9b_ul2_it/1",
+    },
+    "t5gemma_9b_9b_prefixlm_it": {
+        "metadata": {
+            "description": (
+                "T5Gemma 9B/9B model with a 9-billion-parameter encoder and "
+                "9-billion-parameter decoder, adapted as a prefix language "
+                "model and fine-tuned for instruction following."
+            ),
+            "params": 20333401088,
+            "path": "t5gemma",
+        },
+        "kaggle_handle": "kaggle://keras/t5-gemma/keras/t5gemma_9b_9b_prefixlm_it/1",
     },
 }
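The T5Gemma catalogue thus grows from a single preset hosted under a personal Kaggle namespace to the full matrix of encoder/decoder sizes under kaggle://keras/t5-gemma. A minimal sketch of loading one of the new entries, assuming the standard KerasHub Backbone.from_preset flow (the preset name is taken from the table above):

import keras_hub

# Smallest UL2-adapted variant added in this release (~312M parameters).
backbone = keras_hub.models.Backbone.from_preset("t5gemma_s_s_ul2")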
keras_hub/src/version.py CHANGED
@@ -1,7 +1,7 @@
 from keras_hub.src.api_export import keras_hub_export
 
 # Unique source of truth for the version number.
-__version__ = "0.23.0.dev202509150421"
+__version__ = "0.23.0.dev202509170415"
 
 
 @keras_hub_export("keras_hub.version")
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: keras-hub-nightly
-Version: 0.23.0.dev202509150421
+Version: 0.23.0.dev202509170415
 Summary: Pretrained models for Keras.
 Author-email: Keras team <keras-users@googlegroups.com>
 License-Expression: Apache-2.0
@@ -5,7 +5,7 @@ keras_hub/models/__init__.py,sha256=Est6LugIjoAFkpTgqZWfISk-1NVMH_k-4soHCHaMmyM,
 keras_hub/samplers/__init__.py,sha256=aFQIkiqbZpi8vjrPp2MVII4QUfE-eQjra5fMeHsoy7k,886
 keras_hub/src/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_hub/src/api_export.py,sha256=9pQZK27JObxWZ96QPLBp1OBsjWigh1iuV6RglPGMRk0,1499
-keras_hub/src/version.py,sha256=ywQCH4lmmFTa16Vn8wjE2OfysJGDeN1ZU0TMjxT1OEE,222
+keras_hub/src/version.py,sha256=c0SNMygFvb51czNKC9l8WE00dnIElyFuZOGH4AkiGdU,222
 keras_hub/src/layers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_hub/src/layers/modeling/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 keras_hub/src/layers/modeling/alibi_bias.py,sha256=1XBTHI52L_iJDhN_w5ydu_iMhCuTgQAxEPwcLA6BPuk,4411
@@ -106,7 +106,7 @@ keras_hub/src/models/clip/clip_backbone.py,sha256=DRAXEJFVPcgf1-AeVDDmuoxplwTCl4
 keras_hub/src/models/clip/clip_image_converter.py,sha256=XyHEDB4RbYiveMN1hLQxHgGADb_goyWyE0TceAd2owM,330
 keras_hub/src/models/clip/clip_layers.py,sha256=ns3Zzm5UzMpm-ynyU3aJu2d4i3HmzNiZKdAea624ako,10184
 keras_hub/src/models/clip/clip_preprocessor.py,sha256=xj-FzK7gLIUyvTo2iM1zHh9f2Ff25tZCYFxsPE3dwFU,4771
-keras_hub/src/models/clip/clip_presets.py,sha256=b9Azial1dUtuNV96Q0Ahz-bcBRmlIjnZPUzMvAMb8OY,3348
+keras_hub/src/models/clip/clip_presets.py,sha256=vrLk5UpOk4fvo4kRn05BPKUnhtvkRoSg7iS9UJEqNw8,3348
 keras_hub/src/models/clip/clip_text_encoder.py,sha256=lZa9Ditvn4DH9As3NEML_Wl6g2qeYer_LzRHGu1hqCM,5449
 keras_hub/src/models/clip/clip_tokenizer.py,sha256=6gIm_LWRbCeBQUI9M2gA8-OXb4tXGygixkbcL6joV1c,7444
 keras_hub/src/models/clip/clip_vision_encoder.py,sha256=C5grKgIgFF8ls-kkGdYorpw5tbfgbmBQe6VJg_3yWII,6368
@@ -116,7 +116,7 @@ keras_hub/src/models/cspnet/cspnet_image_classifier.py,sha256=JqfBHIBTFxaLOyAWx6
 keras_hub/src/models/cspnet/cspnet_image_classifier_preprocessor.py,sha256=ACRnOhjslk2ZZhpPfJioW4um4RLYa-Suk59z9wa5vfo,543
 keras_hub/src/models/cspnet/cspnet_image_converter.py,sha256=f-ICTY2T-RlCykU6qOHDxg0fY7ECfZ_xpSJzIVmbvpc,342
 keras_hub/src/models/cspnet/cspnet_presets.py,sha256=n01_7DTvbmaA_qs2GWiNLkBXNrrEvigPXSGc2NDTot8,1870
-keras_hub/src/models/d_fine/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+keras_hub/src/models/d_fine/__init__.py,sha256=-1dG2O0zjDhODJG8DEWuZo6MCbmlGgIsIqJwBhDXDU4,255
 keras_hub/src/models/d_fine/d_fine_attention.py,sha256=RlsgB9XxTz88wkGSRVFYpKSdiKMVxyb-fCnnpEfVQqo,17848
 keras_hub/src/models/d_fine/d_fine_backbone.py,sha256=KDBVu5LNKqBfNmKsnyJGY0YmJZRLOEo9Pi0VSjjJr5M,37363
 keras_hub/src/models/d_fine/d_fine_decoder.py,sha256=7b4yZaLf2BLA51szoJCgUdqw91QGzv7oxs-DvqVjsvg,38658
@@ -127,7 +127,7 @@ keras_hub/src/models/d_fine/d_fine_layers.py,sha256=hClOattmgjUcxcAS3LgpX36xKvD9
 keras_hub/src/models/d_fine/d_fine_loss.py,sha256=zO-LBBXJvbmSpsQ-DvTWN2N5qJmToIp61DMfnp31XE8,36046
 keras_hub/src/models/d_fine/d_fine_object_detector.py,sha256=ap5ZQypupCDhsdFhm4hVQuMY3767r5cYEQZwOY3LYDI,32762
 keras_hub/src/models/d_fine/d_fine_object_detector_preprocessor.py,sha256=738VvyHGQdsGN3sSP1yDnOOiC4RpYSQSES7OySynVm8,532
-keras_hub/src/models/d_fine/d_fine_presets.py,sha256=FIe3owE5HOWrr_kWvn2r8v9vjetFd-fMoe4b4y9HvgY,71
+keras_hub/src/models/d_fine/d_fine_presets.py,sha256=KCrx2ZwprCcm_uYPrJaMwiy_FDSqdsG_v2YAljYloDk,5737
 keras_hub/src/models/d_fine/d_fine_utils.py,sha256=-EL5zanBgwDe6-RV4N9dwp-fkd7cy4SrGZDhc3WRR5A,31130
 keras_hub/src/models/deberta_v3/__init__.py,sha256=6E-QtAD1uvTBobrn5bUoyB1qtaCJU-t73TtbAEH6i9g,288
 keras_hub/src/models/deberta_v3/deberta_v3_backbone.py,sha256=oXdV7naTiMowuU3GsXEUo5K0GXiKbPKxdo27o5fXWjc,7258
@@ -221,7 +221,7 @@ keras_hub/src/models/flux/flux_text_to_image.py,sha256=Rf5dD2EhG0bE8Gyg9sqaA8YEe
 keras_hub/src/models/flux/flux_text_to_image_preprocessor.py,sha256=2kI2vSZvTia5ISb4BVPgC_e1l5rkirLSjhm13P-UR_k,2362
 keras_hub/src/models/gemma/__init__.py,sha256=rVzOJMJ39bgVlT8UdC0t8PlN2c237GKTBmfHIsbPuOQ,251
 keras_hub/src/models/gemma/gemma_attention.py,sha256=wmU5FgQu1Ajg-KHKVXTLHWH7pXqN4_zVJTCp_FXMcAs,10095
-keras_hub/src/models/gemma/gemma_backbone.py,sha256=GzAUSArw_pN9dtWQzTVhWDbW-XyWt4GyMcFLn9hwmh0,13391
+keras_hub/src/models/gemma/gemma_backbone.py,sha256=pAAVaVKB6nlA0PncVnFXvNgJV7SeZy_ko2AxoIs0jF0,13364
 keras_hub/src/models/gemma/gemma_causal_lm.py,sha256=3OXaIXlrKqMIuUnBk-bUz-0SYFL-XkkQTWm8qRY2YII,16770
 keras_hub/src/models/gemma/gemma_causal_lm_preprocessor.py,sha256=bpKkEurWIfa6Kp9s4pz84-sBDSA6ZFNHP8nXG1fFQrg,2912
 keras_hub/src/models/gemma/gemma_decoder_block.py,sha256=f5UsRO-VNsKJfm_WHVJWK4UahhzYm3sKprJ8jjr-zm4,7628
@@ -230,7 +230,7 @@ keras_hub/src/models/gemma/gemma_tokenizer.py,sha256=FhcyNL4lo63MqOhTQPFr07-u3Bd
 keras_hub/src/models/gemma/rms_normalization.py,sha256=fku-JEo2sNy-ytX7ySD1sRzdhRAPmYex_z8oFk1NiG8,833
 keras_hub/src/models/gemma3/__init__.py,sha256=oPFadkdK5DRLD6sYx83iTetY5daWuSzmJilLjokHcbU,257
 keras_hub/src/models/gemma3/gemma3_attention.py,sha256=u3RNI8dva5lzzqFNTAe9996s87cNJ_GEWc9BIJD337Q,15473
-keras_hub/src/models/gemma3/gemma3_backbone.py,sha256=pjCEg-T9ZiP7KTL7XpwM6vKwhcqhke2TCKpTURWJklg,16713
+keras_hub/src/models/gemma3/gemma3_backbone.py,sha256=HdWDRuF9MMwIzNVZEd1j53ILzptskvCxFiO__nfVQYU,16686
 keras_hub/src/models/gemma3/gemma3_causal_lm.py,sha256=U3C9TWlIz8VefAxQ0wJ6bDz18wqHBie8B26Ub_nFZs4,13843
 keras_hub/src/models/gemma3/gemma3_causal_lm_preprocessor.py,sha256=vjt4N-zr0Eb5kvkOR-WUgskDTNe64L_6tYnhyNb6xaE,29601
 keras_hub/src/models/gemma3/gemma3_decoder_block.py,sha256=CYwYazqwakLNfhOLBl_8Q2TVZcMcOxMtiZtuVlk_hoo,11470
@@ -463,7 +463,7 @@ keras_hub/src/models/t5gemma/t5gemma_backbone.py,sha256=wV5UTSlHm9P5AhsK-Bnab_my
 keras_hub/src/models/t5gemma/t5gemma_decoder.py,sha256=BHzdk5akm7sVbEyL7e176YYeuT2gVtSW7ol41b0PdSM,14375
 keras_hub/src/models/t5gemma/t5gemma_encoder.py,sha256=KW5xZTVS9UgzoQspHwKcYkqKWYxob2wACZKQUv-zIC0,8675
 keras_hub/src/models/t5gemma/t5gemma_layers.py,sha256=19_CLs6_lYTqdQJQTlalI50VEI8F3buNgXWoBoIgjas,4381
-keras_hub/src/models/t5gemma/t5gemma_presets.py,sha256=tfAu9fOxrrvF6vf8_zOY8be2RKU-0BoQpshCupOqH80,532
+keras_hub/src/models/t5gemma/t5gemma_presets.py,sha256=vTL0DMAR-r0-Qco6cgdDGriZrwFUFgXD0CrqjWVoA1M,13901
 keras_hub/src/models/t5gemma/t5gemma_seq_2_seq_lm.py,sha256=-dRXqt1DbKQVUKqUqafBft2rJUB89tEj7NuRMlhX5og,17836
 keras_hub/src/models/t5gemma/t5gemma_seq_2_seq_lm_preprocessor.py,sha256=AXjmd0vOQ2J__E9GACeKWTosGrlkzcriC2OstQi0-x0,8186
 keras_hub/src/models/t5gemma/t5gemma_tokenizer.py,sha256=4EUX_kUEDqB6QAKSv2VxBVUVrF16TIBBX34Dir7f-70,2740
@@ -578,7 +578,7 @@ keras_hub/src/utils/transformers/export/gemma.py,sha256=xX_vfQwvFZ_-lQX4kgMNOGKL
 keras_hub/src/utils/transformers/export/hf_exporter.py,sha256=Qk52c6LIA2eMHUNY9Vy4STJSpnhLMdJ_t-3ljqhSr4k,5081
 keras_hub/tokenizers/__init__.py,sha256=YEr_cwyX6MACxQOgyRwETilOFYBXpQLNXH22ZdSSv3o,4450
 keras_hub/utils/__init__.py,sha256=jXPqVGBpJr_PpYmqD8aDG-fRMlxH-ulqCR2SZMn288Y,646
-keras_hub_nightly-0.23.0.dev202509150421.dist-info/METADATA,sha256=F93VWq6t2ovCs-bEH8GPO2r6-chwnd1ZR7_FaBHMvbw,7395
-keras_hub_nightly-0.23.0.dev202509150421.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-keras_hub_nightly-0.23.0.dev202509150421.dist-info/top_level.txt,sha256=N4J6piIWBKa38A4uV-CnIopnOEf8mHAbkNXafXm_CuA,10
-keras_hub_nightly-0.23.0.dev202509150421.dist-info/RECORD,,
+keras_hub_nightly-0.23.0.dev202509170415.dist-info/METADATA,sha256=rZPTlEyMDvT0xXOrXnspf-0bTGKX8fYpbW33Q_s0EmI,7395
+keras_hub_nightly-0.23.0.dev202509170415.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+keras_hub_nightly-0.23.0.dev202509170415.dist-info/top_level.txt,sha256=N4J6piIWBKa38A4uV-CnIopnOEf8mHAbkNXafXm_CuA,10
+keras_hub_nightly-0.23.0.dev202509170415.dist-info/RECORD,,