keras-hub-nightly 0.23.0.dev202510100415__py3-none-any.whl → 0.23.0.dev202510110411__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of keras-hub-nightly might be problematic; consult the registry's advisory page for this release for more details.

@@ -0,0 +1,321 @@
1
+ import types
2
+
3
+ import keras
4
+ import numpy as np
5
+
6
+ from keras_hub.src.models.mobilenetv5.mobilenetv5_attention import (
7
+ MobileAttention,
8
+ )
9
+ from keras_hub.src.models.mobilenetv5.mobilenetv5_backbone import (
10
+ MobileNetV5Backbone,
11
+ )
12
+ from keras_hub.src.models.mobilenetv5.mobilenetv5_blocks import EdgeResidual
13
+ from keras_hub.src.models.mobilenetv5.mobilenetv5_blocks import (
14
+ UniversalInvertedResidual,
15
+ )
16
+ from keras_hub.src.models.mobilenetv5.mobilenetv5_builder import (
17
+ convert_arch_def_to_stackwise,
18
+ )
19
+ from keras_hub.src.models.mobilenetv5.mobilenetv5_layers import ConvNormAct
20
+ from keras_hub.src.models.mobilenetv5.mobilenetv5_layers import RmsNorm2d
21
+
22
# Backbone class this converter targets; read by the timm preset loader.
backbone_cls = MobileNetV5Backbone

# Architecture definitions keyed by timm model name. Each string is a
# timm-style block spec ("er" = EdgeResidual, "uir" = UniversalInvertedResidual,
# "mqa" = mobile multi-query attention; r/a/k/s/e/c/h/v/d encode repeats,
# kernel sizes, stride, expansion, channels, heads, kv stride, and head dim)
# that `convert_arch_def_to_stackwise` decodes into stackwise backbone kwargs.
MODEL_CONFIGS = {
    "mobilenetv5_300m": {
        "backbone": convert_arch_def_to_stackwise(
            [
                # Stage 0: 128x128 in
                [
                    "er_r1_k3_s2_e4_c128",
                    "er_r1_k3_s1_e4_c128",
                    "er_r1_k3_s1_e4_c128",
                ],
                # Stage 1: 256x256 in
                [
                    "uir_r1_a3_k5_s2_e6_c256",
                    "uir_r1_a5_k0_s1_e4_c256",
                    "uir_r1_a3_k0_s1_e4_c256",
                    "uir_r1_a5_k0_s1_e4_c256",
                    "uir_r1_a3_k0_s1_e4_c256",
                ],
                # Stage 2: 640x640 in
                [
                    "uir_r1_a5_k5_s2_e6_c640",
                    "uir_r1_a5_k0_s1_e4_c640",
                    "uir_r1_a5_k0_s1_e4_c640",
                    "uir_r1_a5_k0_s1_e4_c640",
                    "uir_r1_a5_k0_s1_e4_c640",
                    "uir_r1_a5_k0_s1_e4_c640",
                    "uir_r1_a5_k0_s1_e4_c640",
                    "uir_r1_a5_k0_s1_e4_c640",
                    "uir_r1_a0_k0_s1_e1_c640",
                    # Alternating attention / FFN-style pairs.
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                    "mqa_r1_k3_h12_v2_s1_d64_c640",
                    "uir_r1_a0_k0_s1_e2_c640",
                ],
                # Stage 3: 1280x1280 in
                [
                    "uir_r1_a5_k5_s2_e6_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                    "mqa_r1_k3_h16_s1_d96_c1280",
                    "uir_r1_a0_k0_s1_e2_c1280",
                ],
            ]
        ),
        # Extra backbone kwargs merged in by `convert_backbone_config`.
        "stem_size": 64,
        "num_features": 2048,
        "norm_layer": "rms_norm",
        "act_layer": "gelu",
        "use_msfa": True,
        "layer_scale_init_value": 1e-5,
    },
}
134
+
135
+
136
def convert_head(task, loader, timm_config):
    """No-op head converter for MobileNetV5.

    Present for interface parity with the other timm converters; this
    architecture ports no classification-head weights here.
    """
    return None
138
+
139
+
140
def convert_backbone_config(timm_config):
    """Translate a timm config dict into `MobileNetV5Backbone` kwargs.

    Args:
        timm_config: The timm model config; must contain an
            `"architecture"` key present in `MODEL_CONFIGS`.

    Returns:
        A dict of keyword arguments for `MobileNetV5Backbone`.

    Raises:
        ValueError: If the architecture is not a supported MobileNetV5
            variant.
    """
    timm_architecture = timm_config["architecture"]
    if timm_architecture not in MODEL_CONFIGS:
        raise ValueError(f"Unsupported architecture: {timm_architecture}")
    config = dict(MODEL_CONFIGS[timm_architecture])
    # Copy the nested "backbone" dict as well: the original shallow
    # `.copy()` returned the very dict object stored in MODEL_CONFIGS, so
    # the `update()` below leaked the extra kwargs (stem_size, etc.) into
    # the module-level registry on first call.
    backbone_config = dict(config.pop("backbone"))
    backbone_config.update(config)
    return backbone_config
148
+
149
+
150
def convert_weights(backbone, loader, timm_config):
    """Port timm MobileNetV5 weights into a `MobileNetV5Backbone`.

    Args:
        backbone: The Keras `MobileNetV5Backbone` receiving the weights.
        loader: Weight loader exposing `get_tensor(key)` and
            `port_weight(variable, hf_weight_key, hook_fn=None)`.
        timm_config: The timm model config dict. Accepted for interface
            parity with the other timm converters; not read here.
    """

    def key_exists(key):
        # Best-effort probe. Optional tensors (biases, layer-scale gammas,
        # the MSFA tail) may be absent, so any failure just means "skip".
        try:
            loader.get_tensor(key)
            return True
        except Exception:
            return False

    def _port_weights(layer, timm_key, transpose_dims=None):
        # Copy `<timm_key>.weight` (and `.bias` when both sides have one)
        # into a conv layer, optionally permuting axes to Keras layout.
        hf_weight_key = f"{timm_key}.weight"
        if not key_exists(hf_weight_key):
            return
        hook_fn = None
        if transpose_dims:

            def transpose_hook(x, _):
                return np.transpose(x, transpose_dims)

            hook_fn = transpose_hook
        loader.port_weight(
            layer.kernel, hf_weight_key=hf_weight_key, hook_fn=hook_fn
        )
        if layer.bias is not None:
            hf_bias_key = f"{timm_key}.bias"
            if key_exists(hf_bias_key):
                loader.port_weight(
                    layer.bias,
                    hf_weight_key=hf_bias_key,
                )

    def _port_bn(layer, timm_prefix):
        # BatchNorm: scale/offset plus running statistics.
        loader.port_weight(layer.gamma, f"{timm_prefix}.weight")
        loader.port_weight(layer.beta, f"{timm_prefix}.bias")
        loader.port_weight(layer.moving_mean, f"{timm_prefix}.running_mean")
        loader.port_weight(layer.moving_variance, f"{timm_prefix}.running_var")

    def _port_rms_norm(layer, timm_prefix):
        # RMSNorm carries a single learnable scale.
        loader.port_weight(layer.gamma, f"{timm_prefix}.weight")

    def _port_cna(cna_layer: ConvNormAct, timm_conv_prefix, timm_norm_prefix):
        # Conv + norm pair. PyTorch conv weights are (O, I, kH, kW); Keras
        # Conv2D kernels are (kH, kW, I, O) and DepthwiseConv2D kernels are
        # (kH, kW, C, multiplier), hence the two different permutations.
        if isinstance(cna_layer.conv, keras.layers.DepthwiseConv2D):
            _port_weights(
                cna_layer.conv,
                timm_conv_prefix,
                transpose_dims=(2, 3, 0, 1),
            )
        else:
            _port_weights(
                cna_layer.conv,
                timm_conv_prefix,
                transpose_dims=(2, 3, 1, 0),
            )
        # Presence of `running_mean` distinguishes BatchNorm from RMSNorm.
        if key_exists(f"{timm_norm_prefix}.running_mean"):
            _port_bn(cna_layer.norm, timm_norm_prefix)
        else:
            _port_rms_norm(cna_layer.norm, timm_norm_prefix)

    def _port_attn(attn_layer, attn_prefix):
        # Mobile multi-query attention. The query path always ends in a
        # projection; key/value paths gain a down-conv + norm pair only
        # when they contain more than the final projection layer.
        _port_weights(
            attn_layer.query_layers[-1],
            f"{attn_prefix}.query.proj",
            (2, 3, 1, 0),
        )
        if len(attn_layer.key_layers) > 1:
            _port_weights(
                attn_layer.key_layers[0],
                f"{attn_prefix}.key.down_conv",
                (2, 3, 0, 1),
            )
            key_norm_layer = attn_layer.key_layers[1]
            if isinstance(key_norm_layer, RmsNorm2d):
                _port_rms_norm(key_norm_layer, f"{attn_prefix}.key.norm")
            else:
                _port_bn(key_norm_layer, f"{attn_prefix}.key.norm")
        _port_weights(
            attn_layer.key_layers[-1], f"{attn_prefix}.key.proj", (2, 3, 1, 0)
        )
        if len(attn_layer.value_layers) > 1:
            _port_weights(
                attn_layer.value_layers[0],
                f"{attn_prefix}.value.down_conv",
                (2, 3, 0, 1),
            )
            value_norm_layer = attn_layer.value_layers[1]
            if isinstance(value_norm_layer, RmsNorm2d):
                _port_rms_norm(value_norm_layer, f"{attn_prefix}.value.norm")
            else:
                _port_bn(value_norm_layer, f"{attn_prefix}.value.norm")
        _port_weights(
            attn_layer.value_layers[-1],
            f"{attn_prefix}.value.proj",
            (2, 3, 1, 0),
        )
        # NOTE(review): index -2 implies the last entry of
        # `output_proj_layers` is weight-less (e.g. dropout/drop-path);
        # confirm against MobileAttention's layer ordering.
        _port_weights(
            attn_layer.output_proj_layers[-2],
            f"{attn_prefix}.output.proj",
            (2, 3, 1, 0),
        )

    # Stem conv + norm.
    stem_layer = backbone.get_layer("conv_stem")
    _port_cna(stem_layer, "conv_stem.conv", "conv_stem.bn")
    # Collect blocks in registration order; timm addresses them as
    # `blocks.<stage>.<index_in_stage>`.
    block_layers = [
        layer
        for layer in backbone.layers
        if isinstance(
            layer, (EdgeResidual, UniversalInvertedResidual, MobileAttention)
        )
    ]
    block_counter = 0
    for stack_idx in range(len(backbone.stackwise_num_blocks)):
        for block_idx_in_stage in range(
            backbone.stackwise_num_blocks[stack_idx]
        ):
            block = block_layers[block_counter]
            timm_prefix = f"blocks.{stack_idx}.{block_idx_in_stage}"
            if isinstance(block, EdgeResidual):
                _port_cna(
                    block.conv_exp,
                    f"{timm_prefix}.conv_exp",
                    f"{timm_prefix}.bn1",
                )
                _port_cna(
                    block.conv_pwl,
                    f"{timm_prefix}.conv_pwl",
                    f"{timm_prefix}.bn2",
                )
            elif isinstance(block, UniversalInvertedResidual):
                # dw_start/dw_mid may be plain functions (weight-less
                # placeholders) when the block was configured without
                # them, hence the FunctionType check.
                if hasattr(block, "dw_start") and not isinstance(
                    block.dw_start, types.FunctionType
                ):
                    _port_cna(
                        block.dw_start,
                        f"{timm_prefix}.dw_start.conv",
                        f"{timm_prefix}.dw_start.bn",
                    )
                _port_cna(
                    block.pw_exp,
                    f"{timm_prefix}.pw_exp.conv",
                    f"{timm_prefix}.pw_exp.bn",
                )
                if hasattr(block, "dw_mid") and not isinstance(
                    block.dw_mid, types.FunctionType
                ):
                    _port_cna(
                        block.dw_mid,
                        f"{timm_prefix}.dw_mid.conv",
                        f"{timm_prefix}.dw_mid.bn",
                    )
                _port_cna(
                    block.pw_proj,
                    f"{timm_prefix}.pw_proj.conv",
                    f"{timm_prefix}.pw_proj.bn",
                )
                # Layer scale is optional in the checkpoint.
                gamma_key = f"{timm_prefix}.layer_scale.gamma"
                if key_exists(gamma_key):
                    loader.port_weight(block.layer_scale.gamma, gamma_key)
            elif isinstance(block, MobileAttention):
                _port_rms_norm(block.norm, f"{timm_prefix}.norm")
                gamma_key = f"{timm_prefix}.layer_scale.gamma"
                if key_exists(gamma_key):
                    loader.port_weight(block.layer_scale.gamma, gamma_key)
                attn_prefix = f"{timm_prefix}.attn"
                _port_attn(block.attn, attn_prefix)
            block_counter += 1
    # Multi-scale fusion adapter tail; `get_layer` raises ValueError when
    # the variant was built without MSFA, in which case there is nothing
    # to port.
    try:
        msfa_layer = backbone.get_layer("msfa")
        ffn = msfa_layer.ffn
        _port_cna(ffn.pw_exp, "msfa.ffn.pw_exp.conv", "msfa.ffn.pw_exp.bn")
        _port_cna(ffn.pw_proj, "msfa.ffn.pw_proj.conv", "msfa.ffn.pw_proj.bn")
        _port_rms_norm(msfa_layer.norm, "msfa.norm")
    except ValueError:
        pass
@@ -7,6 +7,7 @@ from keras_hub.src.utils.timm import convert_cspnet
7
7
  from keras_hub.src.utils.timm import convert_densenet
8
8
  from keras_hub.src.utils.timm import convert_efficientnet
9
9
  from keras_hub.src.utils.timm import convert_mobilenet
10
+ from keras_hub.src.utils.timm import convert_mobilenetv5
10
11
  from keras_hub.src.utils.timm import convert_resnet
11
12
  from keras_hub.src.utils.timm import convert_vgg
12
13
  from keras_hub.src.utils.transformers.safetensor_utils import SafetensorLoader
@@ -22,6 +23,8 @@ class TimmPresetLoader(PresetLoader):
22
23
  self.converter = convert_cspnet
23
24
  elif architecture.startswith("densenet"):
24
25
  self.converter = convert_densenet
26
+ elif architecture.startswith("mobilenetv5"):
27
+ self.converter = convert_mobilenetv5
25
28
  elif architecture.startswith("mobilenet"):
26
29
  self.converter = convert_mobilenet
27
30
  elif architecture.startswith("vgg"):
@@ -41,7 +44,8 @@ class TimmPresetLoader(PresetLoader):
41
44
  keras_config = self.converter.convert_backbone_config(self.config)
42
45
  backbone = cls(**{**keras_config, **kwargs})
43
46
  if load_weights:
44
- jax_memory_cleanup(backbone)
47
+ if not self.config["architecture"].startswith("mobilenetv5"):
48
+ jax_memory_cleanup(backbone)
45
49
  # Use prefix="" to avoid using `get_prefixed_key`.
46
50
  with SafetensorLoader(self.preset, prefix="") as loader:
47
51
  self.converter.convert_weights(backbone, loader, self.config)
@@ -54,9 +58,9 @@ class TimmPresetLoader(PresetLoader):
54
58
  )
55
59
  # Support loading the classification head for classifier models.
56
60
  kwargs["num_classes"] = self.config["num_classes"]
57
- if (
58
- "num_features" in self.config
59
- and "mobilenet" in self.config["architecture"]
61
+ if "num_features" in self.config and (
62
+ "mobilenet" in self.config["architecture"]
63
+ or "mobilenetv5" in self.config["architecture"]
60
64
  ):
61
65
  kwargs["num_features"] = self.config["num_features"]
62
66
 
keras_hub/src/version.py CHANGED
@@ -1,7 +1,7 @@
1
1
  from keras_hub.src.api_export import keras_hub_export
2
2
 
3
3
  # Unique source of truth for the version number.
4
- __version__ = "0.23.0.dev202510100415"
4
+ __version__ = "0.23.0.dev202510110411"
5
5
 
6
6
 
7
7
  @keras_hub_export("keras_hub.version")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: keras-hub-nightly
3
- Version: 0.23.0.dev202510100415
3
+ Version: 0.23.0.dev202510110411
4
4
  Summary: Pretrained models for Keras.
5
5
  Author-email: Keras team <keras-users@googlegroups.com>
6
6
  License-Expression: Apache-2.0
@@ -1,11 +1,11 @@
1
1
  keras_hub/__init__.py,sha256=bJbUZkqwhZvTb1Tqx1fbkq6mzBYiEyq-Hin3oQIkhdE,558
2
- keras_hub/layers/__init__.py,sha256=8FTy8HwjgFdBvbl_QKTxXmOc13TXjUUBgLYrSTtkc0M,5807
2
+ keras_hub/layers/__init__.py,sha256=ufJKHxMTFhwp--E3ixfGCZqq89pZOUOxCQYgI5pEUA8,5944
3
3
  keras_hub/metrics/__init__.py,sha256=KYalsMPBnfwim9BdGHFfJ5WxUKFXOQ1QoKIMT_0lwlM,439
4
- keras_hub/models/__init__.py,sha256=wy75CGuTVxRIEXSCqmXgMyf23vUbuRbByWrlAaPWXB0,30737
4
+ keras_hub/models/__init__.py,sha256=E9_kQFlM75iKRiSUFEF7F8e9aqMo89-88XSSLy2sbtY,31172
5
5
  keras_hub/samplers/__init__.py,sha256=aFQIkiqbZpi8vjrPp2MVII4QUfE-eQjra5fMeHsoy7k,886
6
6
  keras_hub/src/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
7
  keras_hub/src/api_export.py,sha256=9pQZK27JObxWZ96QPLBp1OBsjWigh1iuV6RglPGMRk0,1499
8
- keras_hub/src/version.py,sha256=XWjJrPwsNeGHKdXmdJIHeXWcFLk329AcGB-njcIbEik,222
8
+ keras_hub/src/version.py,sha256=cgZOW8wAMr-nVcvJ7qZLsTzj0scV7mGTt7a3Nt5Uvl4,222
9
9
  keras_hub/src/layers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
10
  keras_hub/src/layers/modeling/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
11
  keras_hub/src/layers/modeling/alibi_bias.py,sha256=1XBTHI52L_iJDhN_w5ydu_iMhCuTgQAxEPwcLA6BPuk,4411
@@ -321,6 +321,16 @@ keras_hub/src/models/mobilenet/mobilenet_image_classifier_preprocessor.py,sha256
321
321
  keras_hub/src/models/mobilenet/mobilenet_image_converter.py,sha256=a3Ka0UYYK5wHSOjf2oMHSgofRazTAeUfttklVefq14w,360
322
322
  keras_hub/src/models/mobilenet/mobilenet_presets.py,sha256=hR_3xxI_PigE8UprXW4lAuKRa3LFGdidBaN8LklxwRQ,1895
323
323
  keras_hub/src/models/mobilenet/util.py,sha256=S7j4UacmVIJ3fU8cymyAoK49eHcpWIKTOyUQiEjcbzQ,721
324
+ keras_hub/src/models/mobilenetv5/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
325
+ keras_hub/src/models/mobilenetv5/mobilenetv5_attention.py,sha256=rH4cp1B5_r8g7gKvDdMvfEGfmMHUB2OMEbWQbX9yUMg,26499
326
+ keras_hub/src/models/mobilenetv5/mobilenetv5_backbone.py,sha256=y19FpVh0M3w9jSmP34E-GixdjMsU2cEJKrtjLcFfGZU,17167
327
+ keras_hub/src/models/mobilenetv5/mobilenetv5_blocks.py,sha256=aw2H-duaCkxGSHB-nKnG8nQhAPxNkmlPUn0FHDb_cTs,34026
328
+ keras_hub/src/models/mobilenetv5/mobilenetv5_builder.py,sha256=_vgjuqJq9GXlccKaL783q77rgtzfa0Oc9aNOhGWeprc,17092
329
+ keras_hub/src/models/mobilenetv5/mobilenetv5_image_classifier.py,sha256=BvL0yClapvG9I5hNMUFuYCXXfnBsBGmKR0ICL6MQqrc,5944
330
+ keras_hub/src/models/mobilenetv5/mobilenetv5_image_classifier_preprocessor.py,sha256=4yhM71JqOzQWiCXTidWAMCNaaIO8QVq5vXl_129ylsI,602
331
+ keras_hub/src/models/mobilenetv5/mobilenetv5_image_converter.py,sha256=HroX3OOwajIz9CIqlcGf9K9MYUEQ86wifABy9ZGRql4,381
332
+ keras_hub/src/models/mobilenetv5/mobilenetv5_layers.py,sha256=wLyR_gTcqiNLUt86fhxhEbbhZH3YA9CbvMSPnA4vTvg,15889
333
+ keras_hub/src/models/mobilenetv5/mobilenetv5_utils.py,sha256=ij2Dfguotb6RGSXuX-MsxF8JBXBWLdSi0LfDOKGp2rk,4868
324
334
  keras_hub/src/models/moonshine/__init__.py,sha256=WK_9Cy1dp5KplNAaTsaJbd-2DGLsiHQsIL5ZnXuCbDQ,275
325
335
  keras_hub/src/models/moonshine/moonshine_audio_converter.py,sha256=FnvR7SP44uVOsA3g9azUhQjsVg809eJ5nqoJZQ-DAq0,11854
326
336
  keras_hub/src/models/moonshine/moonshine_audio_to_text.py,sha256=dXFtjaxL1jpcIAiiZY1-kcNL-S4RiRJiAC2uR_a3Fyc,15865
@@ -558,7 +568,7 @@ keras_hub/src/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
558
568
  keras_hub/src/utils/keras_utils.py,sha256=IWsbg-p-XVLuOkba8PAYNf9zDo4G2RkINLr58p12MhA,5291
559
569
  keras_hub/src/utils/openvino_utils.py,sha256=P1ZvedLv91LZD-UAgAo2dy6WC5305elh1qvgmdYQIGc,4512
560
570
  keras_hub/src/utils/pipeline_model.py,sha256=jgzB6NQPSl0KOu08N-TazfOnXnUJbZjH2EXXhx25Ftg,9084
561
- keras_hub/src/utils/preset_utils.py,sha256=vSs7U9cy0p6UqOEyGvudzL-o3mxl3FX22r4XH6rOgMg,37309
571
+ keras_hub/src/utils/preset_utils.py,sha256=lyCg_PRcYH1Jy8lGKaO8sgpIbMrP-Ik66EbjGD4gizc,37677
562
572
  keras_hub/src/utils/python_utils.py,sha256=N8nWeO3san4YnGkffRXG3Ix7VEIMTKSN21FX5TuL7G8,202
563
573
  keras_hub/src/utils/tensor_utils.py,sha256=bGM0pK-x0R4640emul49GfSZ3p4OSvOaVzZZPlm6eiM,16957
564
574
  keras_hub/src/utils/coco/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -570,9 +580,10 @@ keras_hub/src/utils/timm/convert_cspnet.py,sha256=9p1IF0B4UPbDTruQQXR6mJEUdhvQvH
570
580
  keras_hub/src/utils/timm/convert_densenet.py,sha256=fu8HBIQis5o3ib2tyI2qnmYScVrVIQySok8vTfa1qJ8,3393
571
581
  keras_hub/src/utils/timm/convert_efficientnet.py,sha256=SgEIlyyinS04qoQpEgh3WazHq544zNUCCpfmWh3EjSs,17100
572
582
  keras_hub/src/utils/timm/convert_mobilenet.py,sha256=XTqHOK4nJwigKefsw7ktWJtOgRpEVMO9MtRhuP5qP_k,9219
583
+ keras_hub/src/utils/timm/convert_mobilenetv5.py,sha256=B4qDcVH_v0dZCwcDmUnufbORbwpj-al8atnFMQX3bcg,12437
573
584
  keras_hub/src/utils/timm/convert_resnet.py,sha256=8JFkVtdpy5z9h83LJ97rD-a8FRejXPZvMNksNuStqjM,5834
574
585
  keras_hub/src/utils/timm/convert_vgg.py,sha256=MT5jGnLrzenPpe66Af_Lp1IdR9KGtsSrcmn6_UPqHvQ,2419
575
- keras_hub/src/utils/timm/preset_loader.py,sha256=4hULdq2K2hgPYTZR71PGV4YNDHLG1zcoxF9TXpg6fGE,3905
586
+ keras_hub/src/utils/timm/preset_loader.py,sha256=3bNmKinKjwc5-ToPCrT2dC2MsvD9tpIMzEKRknrnB5A,4190
576
587
  keras_hub/src/utils/transformers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
577
588
  keras_hub/src/utils/transformers/convert_albert.py,sha256=VdKclZpCxtDWq3UbUUQZf4fR9DJK_JYZ73B4O_G9skg,7695
578
589
  keras_hub/src/utils/transformers/convert_bart.py,sha256=Tk4h9Md9rwN5wjQbGIVrC7qzDpF8kI8qm-FKL8HlUok,14411
@@ -599,7 +610,7 @@ keras_hub/src/utils/transformers/export/gemma.py,sha256=xX_vfQwvFZ_-lQX4kgMNOGKL
599
610
  keras_hub/src/utils/transformers/export/hf_exporter.py,sha256=Qk52c6LIA2eMHUNY9Vy4STJSpnhLMdJ_t-3ljqhSr4k,5081
600
611
  keras_hub/tokenizers/__init__.py,sha256=kyFWYm4mb--U4xYU-2Gb1COM8xEFWNK6LcKxr8h9Ivc,4561
601
612
  keras_hub/utils/__init__.py,sha256=jXPqVGBpJr_PpYmqD8aDG-fRMlxH-ulqCR2SZMn288Y,646
602
- keras_hub_nightly-0.23.0.dev202510100415.dist-info/METADATA,sha256=xpB7dOP6MkBQwzdaZL_NOKsxidg3ZUivQ2PWJjpEwhI,7395
603
- keras_hub_nightly-0.23.0.dev202510100415.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
604
- keras_hub_nightly-0.23.0.dev202510100415.dist-info/top_level.txt,sha256=N4J6piIWBKa38A4uV-CnIopnOEf8mHAbkNXafXm_CuA,10
605
- keras_hub_nightly-0.23.0.dev202510100415.dist-info/RECORD,,
613
+ keras_hub_nightly-0.23.0.dev202510110411.dist-info/METADATA,sha256=MxgUS9-n5WEsrwX-tpKOROxlSp55X-Ygd6XC9TaBx5c,7395
614
+ keras_hub_nightly-0.23.0.dev202510110411.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
615
+ keras_hub_nightly-0.23.0.dev202510110411.dist-info/top_level.txt,sha256=N4J6piIWBKa38A4uV-CnIopnOEf8mHAbkNXafXm_CuA,10
616
+ keras_hub_nightly-0.23.0.dev202510110411.dist-info/RECORD,,