keras-nightly 3.14.0.dev2026012204__py3-none-any.whl → 3.14.0.dev2026012304__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. keras/_tf_keras/keras/ops/__init__.py +1 -0
  2. keras/_tf_keras/keras/ops/numpy/__init__.py +1 -0
  3. keras/ops/__init__.py +1 -0
  4. keras/ops/numpy/__init__.py +1 -0
  5. keras/src/backend/jax/numpy.py +5 -0
  6. keras/src/backend/numpy/numpy.py +4 -0
  7. keras/src/backend/openvino/numpy.py +37 -2
  8. keras/src/backend/tensorflow/numpy.py +20 -0
  9. keras/src/backend/torch/numpy.py +18 -0
  10. keras/src/layers/layer.py +10 -1
  11. keras/src/layers/preprocessing/image_preprocessing/aug_mix.py +13 -0
  12. keras/src/layers/preprocessing/image_preprocessing/base_image_preprocessing_layer.py +58 -0
  13. keras/src/layers/preprocessing/image_preprocessing/cut_mix.py +13 -0
  14. keras/src/layers/preprocessing/image_preprocessing/max_num_bounding_box.py +23 -0
  15. keras/src/layers/preprocessing/image_preprocessing/rand_augment.py +15 -0
  16. keras/src/layers/preprocessing/image_preprocessing/random_color_degeneration.py +15 -0
  17. keras/src/layers/preprocessing/image_preprocessing/random_color_jitter.py +15 -0
  18. keras/src/layers/preprocessing/image_preprocessing/random_contrast.py +15 -0
  19. keras/src/layers/preprocessing/image_preprocessing/random_crop.py +15 -0
  20. keras/src/layers/preprocessing/image_preprocessing/random_elastic_transform.py +14 -0
  21. keras/src/layers/preprocessing/image_preprocessing/random_erasing.py +15 -0
  22. keras/src/layers/preprocessing/image_preprocessing/random_flip.py +15 -0
  23. keras/src/layers/preprocessing/image_preprocessing/random_gaussian_blur.py +15 -0
  24. keras/src/layers/preprocessing/image_preprocessing/random_grayscale.py +15 -0
  25. keras/src/layers/preprocessing/image_preprocessing/random_invert.py +15 -0
  26. keras/src/layers/preprocessing/image_preprocessing/random_perspective.py +14 -0
  27. keras/src/layers/preprocessing/image_preprocessing/random_posterization.py +15 -0
  28. keras/src/layers/preprocessing/image_preprocessing/random_rotation.py +15 -0
  29. keras/src/layers/preprocessing/image_preprocessing/random_sharpness.py +15 -0
  30. keras/src/layers/preprocessing/image_preprocessing/random_shear.py +15 -0
  31. keras/src/layers/preprocessing/image_preprocessing/random_translation.py +15 -0
  32. keras/src/ops/numpy.py +56 -0
  33. keras/src/regularizers/regularizers.py +2 -2
  34. keras/src/version.py +1 -1
  35. {keras_nightly-3.14.0.dev2026012204.dist-info → keras_nightly-3.14.0.dev2026012304.dist-info}/METADATA +1 -1
  36. {keras_nightly-3.14.0.dev2026012204.dist-info → keras_nightly-3.14.0.dev2026012304.dist-info}/RECORD +38 -38
  37. {keras_nightly-3.14.0.dev2026012204.dist-info → keras_nightly-3.14.0.dev2026012304.dist-info}/WHEEL +0 -0
  38. {keras_nightly-3.14.0.dev2026012204.dist-info → keras_nightly-3.14.0.dev2026012304.dist-info}/top_level.txt +0 -0
keras/_tf_keras/keras/ops/__init__.py CHANGED
@@ -245,6 +245,7 @@ from keras.src.ops.numpy import mod as mod
 from keras.src.ops.numpy import moveaxis as moveaxis
 from keras.src.ops.numpy import multiply as multiply
 from keras.src.ops.numpy import nan_to_num as nan_to_num
+from keras.src.ops.numpy import nanmin as nanmin
 from keras.src.ops.numpy import nansum as nansum
 from keras.src.ops.numpy import ndim as ndim
 from keras.src.ops.numpy import negative as negative
keras/_tf_keras/keras/ops/numpy/__init__.py CHANGED
@@ -129,6 +129,7 @@ from keras.src.ops.numpy import mod as mod
 from keras.src.ops.numpy import moveaxis as moveaxis
 from keras.src.ops.numpy import multiply as multiply
 from keras.src.ops.numpy import nan_to_num as nan_to_num
+from keras.src.ops.numpy import nanmin as nanmin
 from keras.src.ops.numpy import nansum as nansum
 from keras.src.ops.numpy import ndim as ndim
 from keras.src.ops.numpy import negative as negative
keras/ops/__init__.py CHANGED
@@ -245,6 +245,7 @@ from keras.src.ops.numpy import mod as mod
 from keras.src.ops.numpy import moveaxis as moveaxis
 from keras.src.ops.numpy import multiply as multiply
 from keras.src.ops.numpy import nan_to_num as nan_to_num
+from keras.src.ops.numpy import nanmin as nanmin
 from keras.src.ops.numpy import nansum as nansum
 from keras.src.ops.numpy import ndim as ndim
 from keras.src.ops.numpy import negative as negative
keras/ops/numpy/__init__.py CHANGED
@@ -129,6 +129,7 @@ from keras.src.ops.numpy import mod as mod
 from keras.src.ops.numpy import moveaxis as moveaxis
 from keras.src.ops.numpy import multiply as multiply
 from keras.src.ops.numpy import nan_to_num as nan_to_num
+from keras.src.ops.numpy import nanmin as nanmin
 from keras.src.ops.numpy import nansum as nansum
 from keras.src.ops.numpy import ndim as ndim
 from keras.src.ops.numpy import negative as negative
keras/src/backend/jax/numpy.py CHANGED
@@ -1013,6 +1013,11 @@ def moveaxis(x, source, destination):
     return jnp.moveaxis(x, source=source, destination=destination)


+def nanmin(x, axis=None, keepdims=False):
+    x = convert_to_tensor(x)
+    return jnp.nanmin(x, axis=axis, keepdims=keepdims)
+
+
 def nansum(x, axis=None, keepdims=False):
     x = convert_to_tensor(x)
     return jnp.nansum(x, axis=axis, keepdims=keepdims)
keras/src/backend/numpy/numpy.py CHANGED
@@ -960,6 +960,10 @@ def moveaxis(x, source, destination):
     return np.moveaxis(x, source=source, destination=destination)


+def nanmin(x, axis=None, keepdims=False):
+    return np.nanmin(x, axis=axis, keepdims=keepdims)
+
+
 def nansum(x, axis=None, keepdims=False):
     axis = standardize_axis_for_numpy(axis)
     dtype = standardize_dtype(x.dtype)
keras/src/backend/openvino/numpy.py CHANGED
@@ -1522,7 +1522,25 @@ def lcm(x1, x2):


 def ldexp(x1, x2):
-    raise NotImplementedError("`ldexp` is not supported with openvino backend")
+    element_type = None
+    if isinstance(x1, OpenVINOKerasTensor):
+        element_type = x1.output.get_element_type()
+    if isinstance(x2, OpenVINOKerasTensor):
+        element_type = x2.output.get_element_type()
+    x1 = get_ov_output(x1, element_type)
+    x2 = get_ov_output(x2, element_type)
+    x1, x2 = _align_operand_types(x1, x2, "ldexp()")
+
+    float_dtype = OPENVINO_DTYPES[config.floatx()]
+    if x1.get_element_type().is_integral():
+        x1 = ov_opset.convert(x1, float_dtype)
+    if x2.get_element_type().is_integral():
+        x2 = ov_opset.convert(x2, float_dtype)
+
+    const_two = ov_opset.constant(2, x2.get_element_type())
+    result = ov_opset.multiply(x1, ov_opset.power(const_two, x2))
+
+    return OpenVINOKerasTensor(result.output(0))


 def less(x1, x2):
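For reference, the new OpenVINO `ldexp` builds the usual identity `ldexp(x1, x2) = x1 * 2**x2` as a graph of `power` and `multiply` nodes. A minimal NumPy sketch of the same semantics (illustrative only, not part of the diff):

```python
import numpy as np

def ldexp_reference(x1, x2):
    # ldexp(x1, x2) == x1 * 2**x2, evaluated in floating point, which is
    # what the OpenVINO graph above builds with multiply(x1, power(2, x2)).
    x1 = np.asarray(x1, dtype="float32")
    x2 = np.asarray(x2, dtype="float32")
    return x1 * np.power(2.0, x2)

print(ldexp_reference(3.0, 4))  # 48.0
print(np.ldexp(3.0, 4))         # 48.0, NumPy's built-in, for comparison
```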
@@ -2089,6 +2107,10 @@ def moveaxis(x, source, destination):
     return OpenVINOKerasTensor(ov_opset.transpose(x, axes_const).output(0))


+def nanmin(x, axis=None, keepdims=False):
+    raise NotImplementedError("`nanmin` is not supported with openvino backend")
+
+
 def nansum(x, axis=None, keepdims=False):
     raise NotImplementedError("`nansum` is not supported with openvino backend")

@@ -3188,7 +3210,20 @@ def correlate(x1, x2, mode="valid"):


 def select(condlist, choicelist, default=0):
-    raise NotImplementedError("`select` is not supported with openvino backend")
+    if len(condlist) != len(choicelist):
+        raise ValueError(
+            "select(): condlist and choicelist must have the same length"
+        )
+    conds = [get_ov_output(c) for c in condlist]
+    choices = [get_ov_output(v) for v in choicelist]
+
+    result = get_ov_output(default)
+    for cond_idx in reversed(range(len(conds))):
+        cond = conds[cond_idx]
+        choice = choices[cond_idx]
+        choice, result = _align_operand_types(choice, result, "select()")
+        result = ov_opset.select(cond, choice, result).output(0)
+    return OpenVINOKerasTensor(result)


 def slogdet(x):
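The loop above folds the condition/choice pairs from last to first into nested `select` nodes, so earlier conditions take precedence and `default` fills whatever nothing matched, mirroring `numpy.select`. A small NumPy sketch of that equivalence (illustrative only, not part of the diff):

```python
import numpy as np

x = np.array([-2.0, 0.0, 3.0])
condlist = [x < 0, x > 0]
choicelist = [-x, x * 10]

# numpy.select: the first matching condition wins; `default` fills the rest.
expected = np.select(condlist, choicelist, default=0.0)

# The same result, built by folding from the last pair to the first,
# which is the structure the OpenVINO implementation creates with
# nested select nodes.
result = np.full_like(x, 0.0)
for cond, choice in reversed(list(zip(condlist, choicelist))):
    result = np.where(cond, choice, result)

print(expected)  # [ 2.  0. 30.]
print(result)    # [ 2.  0. 30.]
```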
keras/src/backend/tensorflow/numpy.py CHANGED
@@ -2125,6 +2125,26 @@ def moveaxis(x, source, destination):
     return tf.transpose(x, perm)


+def nanmin(x, axis=None, keepdims=False):
+    x = convert_to_tensor(x)
+
+    if not x.dtype.is_floating:
+        dtype = standardize_dtype(x.dtype)
+        if dtype == "bool":
+            return tf.reduce_all(x, axis=axis, keepdims=keepdims)
+        return tf.reduce_min(x, axis=axis, keepdims=keepdims)
+
+    x_clean = tf.where(
+        tf.math.is_nan(x), tf.constant(float("inf"), dtype=x.dtype), x
+    )
+
+    return tf.where(
+        tf.reduce_all(tf.math.is_nan(x), axis=axis, keepdims=keepdims),
+        tf.constant(float("nan"), dtype=x.dtype),
+        tf.reduce_min(x_clean, axis=axis, keepdims=keepdims),
+    )
+
+
 def nansum(x, axis=None, keepdims=False):
     x = convert_to_tensor(x)
     dtype = standardize_dtype(x.dtype)
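The TensorFlow `nanmin` above (and the Torch version in the next hunk) uses the same masking trick: swap NaNs for `+inf` so they can never win the min reduction, then restore NaN wherever an entire reduced slice was NaN. A NumPy sketch of that logic (illustrative only, not part of the diff):

```python
import numpy as np

def nanmin_reference(x, axis=None, keepdims=False):
    x = np.asarray(x, dtype="float32")
    # NaNs replaced by +inf can never be selected by the min reduction.
    x_clean = np.where(np.isnan(x), np.inf, x)
    out = np.min(x_clean, axis=axis, keepdims=keepdims)
    # Slices that were all NaN would otherwise come out as +inf; put NaN back.
    all_nan = np.all(np.isnan(x), axis=axis, keepdims=keepdims)
    return np.where(all_nan, np.nan, out)

x = np.array([[1.0, np.nan, 3.0], [np.nan, np.nan, np.nan]])
print(nanmin_reference(x, axis=1))  # [ 1. nan]
print(np.nanmin(x, axis=1))         # [ 1. nan], plus an all-NaN RuntimeWarning
```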
keras/src/backend/torch/numpy.py CHANGED
@@ -1272,6 +1272,24 @@ def moveaxis(x, source, destination):
     return torch.moveaxis(x, source=source, destination=destination)


+def nanmin(x, axis=None, keepdims=False):
+    x = convert_to_tensor(x)
+    if not torch.is_floating_point(x):
+        return torch.amin(x, dim=axis, keepdim=keepdims)
+
+    if axis == () or axis == []:
+        return x
+
+    x_clean = torch.where(torch.isnan(x), float("inf"), x)
+    out = torch.amin(x_clean, dim=axis, keepdim=keepdims)
+
+    return torch.where(
+        torch.isnan(x).all(dim=axis, keepdim=keepdims),
+        torch.tensor(float("nan"), dtype=x.dtype, device=get_device()),
+        out,
+    )
+
+
 def nansum(x, axis=None, keepdims=False):
     if isinstance(x, (list, tuple)):
         x = stack(x)
keras/src/layers/layer.py CHANGED
@@ -27,6 +27,7 @@ from keras.src import backend
 from keras.src import constraints
 from keras.src import dtype_policies
 from keras.src import initializers
+from keras.src import ops
 from keras.src import regularizers
 from keras.src import tree
 from keras.src import utils
@@ -974,7 +975,15 @@ class Layer(BackendLayer, Operation):
         if self.activity_regularizer is not None:
             for output in tree.flatten(outputs):
                 if backend.is_tensor(output):
-                    self.add_loss(self.activity_regularizer(output))
+                    loss = self.activity_regularizer(output)
+                    if output.ndim > 0:
+                        # Normalize by batch size to ensure consistent
+                        # regularization strength across batch sizes
+                        batch_size = ops.cast(
+                            ops.shape(output)[0], dtype=loss.dtype
+                        )
+                        loss = ops.divide_no_nan(loss, batch_size)
+                    self.add_loss(loss)

         # Set `previous_mask` on outputs if available. It is provided only
         # for the first positional input arg and its mask.
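The change above makes the activity-regularization penalty independent of the batch size: the regularizer still sees the full output, but the resulting loss is divided by `ops.shape(output)[0]`. A small sketch of why that matters (illustrative only; the regularizer and shapes here are arbitrary):

```python
import numpy as np

def l2_activity_penalty(outputs, factor=0.01):
    # Stand-in for an activity regularizer such as keras.regularizers.L2.
    return factor * np.sum(np.square(outputs))

small_batch = np.ones((5, 4))    # 5 samples, identical activations
large_batch = np.ones((50, 4))   # 50 samples, identical activations

# The raw penalty grows with the number of samples in the batch...
print(l2_activity_penalty(small_batch), l2_activity_penalty(large_batch))  # 0.2 2.0

# ...while dividing by the batch size keeps the per-sample strength constant,
# which is what the new ops.divide_no_nan(loss, batch_size) step does.
print(l2_activity_penalty(small_batch) / 5, l2_activity_penalty(large_batch) / 50)  # 0.04 0.04
```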
keras/src/layers/preprocessing/image_preprocessing/aug_mix.py CHANGED
@@ -5,6 +5,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_transform_example,
+)
 from keras.src.random import SeedGenerator
 from keras.src.utils import backend_utils

@@ -71,6 +74,10 @@ class AugMix(BaseImagePreprocessingLayer):
         interpolation: The interpolation method to use for resizing operations.
             Options include `"nearest"`, `"bilinear"`. Default is `"bilinear"`.
         seed: Integer. Used to create a random seed.
+
+    Example:
+
+    {{base_image_preprocessing_transform_example}}
     """

     _USE_BASE_FACTOR = False
@@ -326,3 +333,9 @@ class AugMix(BaseImagePreprocessingLayer):
         }
         base_config = super().get_config()
         return {**base_config, **config}
+
+
+AugMix.__doc__ = AugMix.__doc__.replace(
+    "{{base_image_preprocessing_transform_example}}",
+    base_image_preprocessing_transform_example.replace("{LayerName}", "AugMix"),
+)
keras/src/layers/preprocessing/image_preprocessing/base_image_preprocessing_layer.py CHANGED
@@ -383,3 +383,61 @@ class BaseImagePreprocessingLayer(DataLayer):
         )

         return affine_matrix
+
+
+base_image_preprocessing_transform_example = """
+```python
+layer = keras.layers.{LayerName}(bounding_box_format="xyxy")
+images = np.random.randint(0, 255, (4, 224, 224, 3), dtype="uint8")
+
+bounding_boxes = {
+    "boxes": np.array([
+        [[10, 20, 100, 150], [50, 60, 200, 250]],
+        [[15, 25, 110, 160], [55, 65, 210, 260]],
+        [[20, 30, 120, 170], [60, 70, 220, 270]],
+        [[25, 35, 130, 180], [65, 75, 230, 280]],
+    ], dtype="float32"),
+    "labels": np.array([[0, 1], [1, 2], [2, 3], [0, 3]], dtype="int32")
+}
+
+labels = keras.ops.one_hot(
+    np.array([0, 1, 2, 3]),
+    num_classes=4
+)
+
+segmentation_masks = np.random.randint(0, 3, (4, 224, 224, 1), dtype="uint8")
+
+output = layer(
+    {
+        "images": images,
+        "bounding_boxes": bounding_boxes,
+        "labels": labels,
+        "segmentation_masks": segmentation_masks
+    },
+    training=True
+)
+```
+"""
+
+base_image_preprocessing_color_example = """
+```python
+layer = keras.layers.{LayerName}(value_range=(0, 255))
+images = np.random.randint(0, 255, (8, 224, 224, 3), dtype="uint8")
+
+labels = keras.ops.one_hot(
+    np.array([0, 1, 2, 0, 1, 2, 0, 1]),
+    num_classes=3
+)
+
+segmentation_masks = np.random.randint(0, 3, (8, 224, 224, 1), dtype="uint8")
+
+output = layer(
+    {
+        "images": images,
+        "labels": labels,
+        "segmentation_masks": segmentation_masks
+    },
+    training=True
+)
+```
+"""
keras/src/layers/preprocessing/image_preprocessing/cut_mix.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_color_example,
+)
 from keras.src.random import SeedGenerator


@@ -29,6 +32,10 @@ class CutMix(BaseImagePreprocessingLayer):
             in patch sizes, leading to more diverse and larger mixed patches.
             Defaults to 1.
         seed: Integer. Used to create a random seed.
+
+    Example:
+
+    {{base_image_preprocessing_color_example}}
     """

     _USE_BASE_FACTOR = False
@@ -227,3 +234,9 @@ class CutMix(BaseImagePreprocessingLayer):
         }
         base_config = super().get_config()
         return {**base_config, **config}
+
+
+CutMix.__doc__ = CutMix.__doc__.replace(
+    "{{base_image_preprocessing_color_example}}",
+    base_image_preprocessing_color_example.replace("{LayerName}", "CutMix"),
+)
keras/src/layers/preprocessing/image_preprocessing/max_num_bounding_box.py CHANGED
@@ -15,6 +15,29 @@ class MaxNumBoundingBoxes(BaseImagePreprocessingLayer):
         max_number: Desired output number of bounding boxes.
         padding_value: The padding value of the `boxes` and `labels` in
             `bounding_boxes`. Defaults to `-1`.
+
+    Example:
+
+    ```python
+    max_boxes_layer = keras.layers.MaxNumBoundingBoxes(
+        max_number=10,
+        fill_value=-1
+    )
+
+    images = np.random.randint(0, 255, (1, 224, 224, 3), dtype="uint8")
+
+    bounding_boxes = {
+        "boxes": np.array([
+            [[10, 20, 100, 150], [50, 60, 200, 250], [0, 0, 50, 50]],
+        ]),
+        "labels": np.array([[1, 2, 3]])
+    }
+
+    result = max_boxes_layer({
+        "images": images,
+        "bounding_boxes": bounding_boxes
+    })
+    ```
     """

     def __init__(self, max_number, fill_value=-1, **kwargs):
keras/src/layers/preprocessing/image_preprocessing/rand_augment.py CHANGED
@@ -3,6 +3,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_transform_example,
+)
 from keras.src.random import SeedGenerator
 from keras.src.utils import backend_utils

@@ -32,6 +35,10 @@ class RandAugment(BaseImagePreprocessingLayer):
         interpolation: The interpolation method to use for resizing operations.
             Options include `nearest`, `bilinear`. Default is `bilinear`.
         seed: Integer. Used to create a random seed.
+
+    Example:
+
+    {{base_image_preprocessing_transform_example}}
     """

     _USE_BASE_FACTOR = False
@@ -265,3 +272,11 @@ class RandAugment(BaseImagePreprocessingLayer):
         }
         base_config = super().get_config()
         return {**base_config, **config}
+
+
+RandAugment.__doc__ = RandAugment.__doc__.replace(
+    "{{base_image_preprocessing_transform_example}}",
+    base_image_preprocessing_transform_example.replace(
+        "{LayerName}", "RandAugment"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_color_degeneration.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_color_example,
+)
 from keras.src.random import SeedGenerator


@@ -29,6 +32,10 @@ class RandomColorDegeneration(BaseImagePreprocessingLayer):
             passed float is sampled. In order to ensure the value is always the
             same, please pass a tuple with two identical floats: `(0.5, 0.5)`.
         seed: Integer. Used to create a random seed.
+
+    Example:
+
+    {{base_image_preprocessing_color_example}}
     """

     _VALUE_RANGE_VALIDATION_ERROR = (
@@ -133,3 +140,11 @@ class RandomColorDegeneration(BaseImagePreprocessingLayer):

     def compute_output_shape(self, input_shape):
         return input_shape
+
+
+RandomColorDegeneration.__doc__ = RandomColorDegeneration.__doc__.replace(
+    "{{base_image_preprocessing_color_example}}",
+    base_image_preprocessing_color_example.replace(
+        "{LayerName}", "RandomColorDegeneration"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_color_jitter.py CHANGED
@@ -6,6 +6,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_color_example,
+)
 from keras.src.random.seed_generator import SeedGenerator
 from keras.src.utils import backend_utils

@@ -60,6 +63,10 @@ class RandomColorJitter(BaseImagePreprocessingLayer):
             always the same, please pass a tuple with two identical
             floats: `(0.5, 0.5)`.
         seed: Integer. Used to create a random seed.
+
+    Example:
+
+    {{base_image_preprocessing_color_example}}
     """

     def __init__(
@@ -211,3 +218,11 @@ class RandomColorJitter(BaseImagePreprocessingLayer):
         }
         base_config = super().get_config()
         return {**base_config, **config}
+
+
+RandomColorJitter.__doc__ = RandomColorJitter.__doc__.replace(
+    "{{base_image_preprocessing_color_example}}",
+    base_image_preprocessing_color_example.replace(
+        "{LayerName}", "RandomColorJitter"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_contrast.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_color_example,
+)
 from keras.src.random.seed_generator import SeedGenerator


@@ -45,6 +48,10 @@ class RandomContrast(BaseImagePreprocessingLayer):
             typically either `[0, 1]` or `[0, 255]` depending on how your
             preprocessing pipeline is set up.
         seed: Integer. Used to create a random seed.
+
+    Example:
+
+    {{base_image_preprocessing_color_example}}
     """

     _FACTOR_BOUNDS = (0, 1)
@@ -147,3 +154,11 @@ class RandomContrast(BaseImagePreprocessingLayer):
         }
         base_config = super().get_config()
         return {**base_config, **config}
+
+
+RandomContrast.__doc__ = RandomContrast.__doc__.replace(
+    "{{base_image_preprocessing_color_example}}",
+    base_image_preprocessing_color_example.replace(
+        "{LayerName}", "RandomContrast"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_crop.py CHANGED
@@ -3,6 +3,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_transform_example,
+)
 from keras.src.layers.preprocessing.image_preprocessing.bounding_boxes.converters import (  # noqa: E501
     convert_format,
 )
@@ -47,6 +50,10 @@ class RandomCrop(BaseImagePreprocessingLayer):
         seed: Integer. Used to create a random seed.
         **kwargs: Base layer keyword arguments, such as
             `name` and `dtype`.
+
+    Example:
+
+    {{base_image_preprocessing_transform_example}}
     """

     def __init__(
@@ -274,3 +281,11 @@ class RandomCrop(BaseImagePreprocessingLayer):
             }
         )
         return config
+
+
+RandomCrop.__doc__ = RandomCrop.__doc__.replace(
+    "{{base_image_preprocessing_transform_example}}",
+    base_image_preprocessing_transform_example.replace(
+        "{LayerName}", "RandomCrop"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_elastic_transform.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_transform_example,
+)
 from keras.src.random.seed_generator import SeedGenerator


@@ -62,6 +65,9 @@ class RandomElasticTransform(BaseImagePreprocessingLayer):
             preprocessing pipeline is set up.
         seed: Integer. Used to create a random seed.

+    Example:
+
+    {{base_image_preprocessing_transform_example}}
     """

     _USE_BASE_FACTOR = False
@@ -277,3 +283,11 @@ class RandomElasticTransform(BaseImagePreprocessingLayer):
             "seed": self.seed,
         }
         return {**base_config, **config}
+
+
+RandomElasticTransform.__doc__ = RandomElasticTransform.__doc__.replace(
+    "{{base_image_preprocessing_transform_example}}",
+    base_image_preprocessing_transform_example.replace(
+        "{LayerName}", "RandomElasticTransform"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_erasing.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_color_example,
+)
 from keras.src.random import SeedGenerator


@@ -41,6 +44,10 @@ class RandomErasing(BaseImagePreprocessingLayer):
             typically either `[0, 1]` or `[0, 255]` depending on how your
             preprocessing pipeline is set up.
         seed: Integer. Used to create a random seed.
+
+    Example:
+
+    {{base_image_preprocessing_color_example}}
     """

     _USE_BASE_FACTOR = False
@@ -326,3 +333,11 @@ class RandomErasing(BaseImagePreprocessingLayer):
         }
         base_config = super().get_config()
         return {**base_config, **config}
+
+
+RandomErasing.__doc__ = RandomErasing.__doc__.replace(
+    "{{base_image_preprocessing_color_example}}",
+    base_image_preprocessing_color_example.replace(
+        "{LayerName}", "RandomErasing"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_flip.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_transform_example,
+)
 from keras.src.layers.preprocessing.image_preprocessing.bounding_boxes.converters import (  # noqa: E501
     clip_to_image_size,
 )
@@ -46,6 +49,10 @@ class RandomFlip(BaseImagePreprocessingLayer):
         seed: Integer. Used to create a random seed.
         **kwargs: Base layer keyword arguments, such as
             `name` and `dtype`.
+
+    Example:
+
+    {{base_image_preprocessing_transform_example}}
     """

     _USE_BASE_FACTOR = False
@@ -234,3 +241,11 @@ class RandomFlip(BaseImagePreprocessingLayer):
             }
         )
         return config
+
+
+RandomFlip.__doc__ = RandomFlip.__doc__.replace(
+    "{{base_image_preprocessing_transform_example}}",
+    base_image_preprocessing_transform_example.replace(
+        "{LayerName}", "RandomFlip"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_gaussian_blur.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_color_example,
+)
 from keras.src.random import SeedGenerator


@@ -35,6 +38,10 @@ class RandomGaussianBlur(BaseImagePreprocessingLayer):
             typically either `[0, 1]` or `[0, 255]` depending on how your
             preprocessing pipeline is set up.
         seed: Integer. Used to create a random seed.
+
+    Example:
+
+    {{base_image_preprocessing_color_example}}
     """

     _USE_BASE_FACTOR = False
@@ -218,3 +225,11 @@ class RandomGaussianBlur(BaseImagePreprocessingLayer):
             }
         )
         return config
+
+
+RandomGaussianBlur.__doc__ = RandomGaussianBlur.__doc__.replace(
+    "{{base_image_preprocessing_color_example}}",
+    base_image_preprocessing_color_example.replace(
+        "{LayerName}", "RandomGaussianBlur"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_grayscale.py CHANGED
@@ -3,6 +3,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_color_example,
+)


 @keras_export("keras.layers.RandomGrayscale")
@@ -43,6 +46,10 @@ class RandomGrayscale(BaseImagePreprocessingLayer):
         Same as input shape. The output maintains the same number of channels
         as the input, even for grayscale-converted images where all channels
         will have the same value.
+
+    Example:
+
+    {{base_image_preprocessing_color_example}}
     """

     def __init__(self, factor=0.5, data_format=None, seed=None, **kwargs):
@@ -115,3 +122,11 @@ class RandomGrayscale(BaseImagePreprocessingLayer):
         config = super().get_config()
         config.update({"factor": self.factor})
         return config
+
+
+RandomGrayscale.__doc__ = RandomGrayscale.__doc__.replace(
+    "{{base_image_preprocessing_color_example}}",
+    base_image_preprocessing_color_example.replace(
+        "{LayerName}", "RandomGrayscale"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_invert.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_color_example,
+)


 @keras_export("keras.layers.RandomInvert")
@@ -30,6 +33,10 @@ class RandomInvert(BaseImagePreprocessingLayer):
             represents the upper bound. Images passed to the layer should have
             values within `value_range`. Defaults to `(0, 255)`.
         seed: Integer. Used to create a random seed.
+
+    Example:
+
+    {{base_image_preprocessing_color_example}}
     """

     _USE_BASE_FACTOR = False
@@ -127,3 +134,11 @@ class RandomInvert(BaseImagePreprocessingLayer):
         }
         base_config = super().get_config()
         return {**base_config, **config}
+
+
+RandomInvert.__doc__ = RandomInvert.__doc__.replace(
+    "{{base_image_preprocessing_color_example}}",
+    base_image_preprocessing_color_example.replace(
+        "{LayerName}", "RandomInvert"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_perspective.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_transform_example,
+)
 from keras.src.layers.preprocessing.image_preprocessing.bounding_boxes.converters import (  # noqa: E501
     clip_to_image_size,
 )
@@ -43,6 +46,9 @@ class RandomPerspective(BaseImagePreprocessingLayer):
             boundaries when `fill_mode="constant"`.
         seed: Integer. Used to create a random seed.

+    Example:
+
+    {{base_image_preprocessing_transform_example}}
     """

     _USE_BASE_FACTOR = False
@@ -337,3 +343,11 @@ class RandomPerspective(BaseImagePreprocessingLayer):
             "seed": self.seed,
         }
         return {**base_config, **config}
+
+
+RandomPerspective.__doc__ = RandomPerspective.__doc__.replace(
+    "{{base_image_preprocessing_transform_example}}",
+    base_image_preprocessing_transform_example.replace(
+        "{LayerName}", "RandomPerspective"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_posterization.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_color_example,
+)


 @keras_export("keras.layers.RandomPosterization")
@@ -22,6 +25,10 @@ class RandomPosterization(BaseImagePreprocessingLayer):
             values within `value_range`. Defaults to `(0, 255)`.
         factor: integer, the number of bits to keep for each channel. Must be a
             value between 1-8.
+
+    Example:
+
+    {{base_image_preprocessing_color_example}}
     """

     _USE_BASE_FACTOR = False
@@ -152,3 +159,11 @@ class RandomPosterization(BaseImagePreprocessingLayer):

     def compute_output_shape(self, input_shape):
         return input_shape
+
+
+RandomPosterization.__doc__ = RandomPosterization.__doc__.replace(
+    "{{base_image_preprocessing_color_example}}",
+    base_image_preprocessing_color_example.replace(
+        "{LayerName}", "RandomPosterization"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_rotation.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_transform_example,
+)
 from keras.src.layers.preprocessing.image_preprocessing.bounding_boxes import (
     converters,
 )
@@ -75,6 +78,10 @@ class RandomRotation(BaseImagePreprocessingLayer):
             `image_data_format` value found in your Keras config file at
             `~/.keras/keras.json`. If you never set it, then it will be
             `"channels_last"`.
+
+    Example:
+
+    {{base_image_preprocessing_transform_example}}
     """

     _SUPPORTED_FILL_MODE = ("reflect", "wrap", "constant", "nearest")
@@ -247,3 +254,11 @@ class RandomRotation(BaseImagePreprocessingLayer):
         }
         base_config = super().get_config()
         return {**base_config, **config}
+
+
+RandomRotation.__doc__ = RandomRotation.__doc__.replace(
+    "{{base_image_preprocessing_transform_example}}",
+    base_image_preprocessing_transform_example.replace(
+        "{LayerName}", "RandomRotation"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_sharpness.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_color_example,
+)
 from keras.src.random import SeedGenerator


@@ -32,6 +35,10 @@ class RandomSharpness(BaseImagePreprocessingLayer):
             typically either `[0, 1]` or `[0, 255]` depending on how your
             preprocessing pipeline is set up.
         seed: Integer. Used to create a random seed.
+
+    Example:
+
+    {{base_image_preprocessing_color_example}}
     """

     _USE_BASE_FACTOR = False
@@ -169,3 +176,11 @@ class RandomSharpness(BaseImagePreprocessingLayer):

     def compute_output_shape(self, input_shape):
         return input_shape
+
+
+RandomSharpness.__doc__ = RandomSharpness.__doc__.replace(
+    "{{base_image_preprocessing_color_example}}",
+    base_image_preprocessing_color_example.replace(
+        "{LayerName}", "RandomSharpness"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_shear.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_transform_example,
+)
 from keras.src.layers.preprocessing.image_preprocessing.bounding_boxes.converters import (  # noqa: E501
     clip_to_image_size,
 )
@@ -61,6 +64,10 @@ class RandomShear(BaseImagePreprocessingLayer):
         fill_value: A float representing the value to be filled outside the
             boundaries when `fill_mode="constant"`.
         seed: Integer. Used to create a random seed.
+
+    Example:
+
+    {{base_image_preprocessing_transform_example}}
     """

     _USE_BASE_FACTOR = False
@@ -402,3 +409,11 @@ class RandomShear(BaseImagePreprocessingLayer):

     def compute_output_shape(self, input_shape):
         return input_shape
+
+
+RandomShear.__doc__ = RandomShear.__doc__.replace(
+    "{{base_image_preprocessing_transform_example}}",
+    base_image_preprocessing_transform_example.replace(
+        "{LayerName}", "RandomShear"
+    ),
+)
keras/src/layers/preprocessing/image_preprocessing/random_translation.py CHANGED
@@ -2,6 +2,9 @@ from keras.src.api_export import keras_export
 from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
     BaseImagePreprocessingLayer,
 )
+from keras.src.layers.preprocessing.image_preprocessing.base_image_preprocessing_layer import (  # noqa: E501
+    base_image_preprocessing_transform_example,
+)
 from keras.src.layers.preprocessing.image_preprocessing.bounding_boxes.converters import (  # noqa: E501
     clip_to_image_size,
 )
@@ -87,6 +90,10 @@ class RandomTranslation(BaseImagePreprocessingLayer):
             `~/.keras/keras.json`. If you never set it, then it will be
             `"channels_last"`.
         **kwargs: Base layer keyword arguments, such as `name` and `dtype`.
+
+    Example:
+
+    {{base_image_preprocessing_transform_example}}
     """

     _USE_BASE_FACTOR = False
@@ -382,3 +389,11 @@ class RandomTranslation(BaseImagePreprocessingLayer):
             "data_format": self.data_format,
         }
         return {**base_config, **config}
+
+
+RandomTranslation.__doc__ = RandomTranslation.__doc__.replace(
+    "{{base_image_preprocessing_transform_example}}",
+    base_image_preprocessing_transform_example.replace(
+        "{LayerName}", "RandomTranslation"
+    ),
+)
keras/src/ops/numpy.py CHANGED
@@ -5064,6 +5064,62 @@ def moveaxis(x, source, destination):
     return backend.numpy.moveaxis(x, source=source, destination=destination)


+class Nanmin(Operation):
+    def __init__(self, axis=None, keepdims=False, *, name=None):
+        super().__init__(name=name)
+        self.axis = axis
+        self.keepdims = keepdims
+
+    def call(self, x):
+        return backend.numpy.nanmin(x, axis=self.axis, keepdims=self.keepdims)
+
+    def compute_output_spec(self, x):
+        dtype = dtypes.result_type(getattr(x, "dtype", backend.floatx()))
+
+        if backend.backend() == "torch" and dtype == "uint32":
+            dtype = "int32"
+
+        return KerasTensor(
+            reduce_shape(x.shape, axis=self.axis, keepdims=self.keepdims),
+            dtype=dtype,
+        )
+
+
+@keras_export(["keras.ops.nanmin", "keras.ops.numpy.nanmin"])
+def nanmin(x, axis=None, keepdims=False):
+    """Minimum of a tensor over the given axes, ignoring NaNs.
+
+    Args:
+        x: Input tensor.
+        axis: Axis or axes along which the minimum is computed.
+            The default is to compute the minimum of the flattened tensor.
+        keepdims: If this is set to `True`, the axes which are reduced are left
+            in the result as dimensions with size one.
+
+    Returns:
+        Output tensor containing the minimum, with NaN values ignored. If all
+        values along a reduced axis are NaN, the result is NaN.
+
+    Examples:
+    >>> import numpy as np
+    >>> from keras import ops
+    >>> x = np.array([[1.0, np.nan, 3.0],
+    ...               [np.nan, 2.0, 1.0]])
+    >>> ops.nanmin(x)
+    1.0
+
+    >>> ops.nanmin(x, axis=1)
+    array([1., 1.])
+
+    >>> ops.nanmin(x, axis=1, keepdims=True)
+    array([[1.],
+           [1.]])
+    """
+    if any_symbolic_tensors((x,)):
+        return Nanmin(axis=axis, keepdims=keepdims).symbolic_call(x)
+    return backend.numpy.nanmin(x, axis=axis, keepdims=keepdims)
+
+
 class Nansum(Operation):
     def __init__(self, axis=None, keepdims=False, *, name=None):
         super().__init__(name=name)
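Besides the eager path shown in the docstring examples, the export also covers symbolic use: calling `nanmin` on a `KerasTensor` routes through `Nanmin.compute_output_spec`, which only infers the reduced shape and dtype. A quick sketch of what that looks like (assuming a nightly build that includes this change):

```python
import keras

# A purely symbolic input: no data, just a shape with an unknown batch dim.
x = keras.KerasTensor(shape=(None, 8, 3), dtype="float32")

y = keras.ops.nanmin(x, axis=1)                 # reduce over axis 1
z = keras.ops.nanmin(x, axis=1, keepdims=True)  # keep the reduced axis

print(y.shape)  # (None, 3)
print(z.shape)  # (None, 1, 3)
```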
keras/src/regularizers/regularizers.py CHANGED
@@ -45,8 +45,8 @@ class Regularizer:
     >>> out = layer(tensor)

     >>> # The kernel regularization term is 0.25
-    >>> # The activity regularization term (after dividing by the batch size)
-    >>> # is 5
+    >>> # The activity regularization term (after dividing by batch size of 5)
+    >>> # is 5.0
     >>> ops.sum(layer.losses)
     5.25

keras/src/version.py CHANGED
@@ -1,7 +1,7 @@
 from keras.src.api_export import keras_export

 # Unique source of truth for the version number.
-__version__ = "3.14.0.dev2026012204"
+__version__ = "3.14.0.dev2026012304"


 @keras_export("keras.version")
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: keras-nightly
-Version: 3.14.0.dev2026012204
+Version: 3.14.0.dev2026012304
 Summary: Multi-backend Keras
 Author-email: Keras team <keras-users@googlegroups.com>
 License: Apache License 2.0
@@ -45,11 +45,11 @@ keras/_tf_keras/keras/losses/__init__.py,sha256=xBc_KOtSLwp3h3CKQ0EnCuIy-Bsak2SP
 keras/_tf_keras/keras/metrics/__init__.py,sha256=_wF31PTvua5ahF9JEW4Hx1UVNjVCLqVI8J5JNrZCBf8,6546
 keras/_tf_keras/keras/mixed_precision/__init__.py,sha256=AM51CzHqzcY75tqdpQiuVcTRUEpUzBqeb-EfLeSDSV8,727
 keras/_tf_keras/keras/models/__init__.py,sha256=83pyA0pzytqin8JLV6FEbPreCb-V64ToebxFGrHsVdQ,501
-keras/_tf_keras/keras/ops/__init__.py,sha256=b95A91bWrAp3S61ui69zIwEJUMsFYVi90E5TfIX7MpE,15618
+keras/_tf_keras/keras/ops/__init__.py,sha256=Exo66cXhtlICdHcwCb9W1aY_kyKGmu_E0gx2g8arqAA,15667
 keras/_tf_keras/keras/ops/image/__init__.py,sha256=oM_PLh5Jk9OGfi1bbJcfWkjoq0Ye5JQG9a7v_KzDfoc,1034
 keras/_tf_keras/keras/ops/linalg/__init__.py,sha256=0ab6icK3yuIm4khSfAksGRFLEAJhaOu6gGgarau4iEQ,822
 keras/_tf_keras/keras/ops/nn/__init__.py,sha256=2eD8IlkfBrsmJjHpzsxMM3_058oGeZVgohdBd27iDnI,2992
-keras/_tf_keras/keras/ops/numpy/__init__.py,sha256=jiEp6-gAl22y9Qzz1HS4T3VzDBlw4VViiXti65xkeAM,9680
+keras/_tf_keras/keras/ops/numpy/__init__.py,sha256=Et9qtWaBOwKAOyJJcBnUjmu40MKJz6mRHtUXHdWdH-Q,9729
 keras/_tf_keras/keras/optimizers/__init__.py,sha256=1fx0vEB-oGu-9dumxoIvX4qVHdgJvf74OLyYoBkE2y0,1267
 keras/_tf_keras/keras/optimizers/legacy/__init__.py,sha256=uIMQESCV80Q0FY-9ikQUjXYPyZqmTfAM3dfohQ5DzYs,516
 keras/_tf_keras/keras/optimizers/schedules/__init__.py,sha256=pQF3rQiAPuUSTUdflTr-fpL77oyGIv9xzGdjae3M3kw,1120
@@ -111,11 +111,11 @@ keras/losses/__init__.py,sha256=VIXBHQFNdLUPZ7JuwtIKj_4E-xf2yvNyrmdklvjr_xM,3667
 keras/metrics/__init__.py,sha256=qeEwtqpSCAaCr8BMUv1eVaqJl2Zb83OB5K0BG3JB0nI,6245
 keras/mixed_precision/__init__.py,sha256=AM51CzHqzcY75tqdpQiuVcTRUEpUzBqeb-EfLeSDSV8,727
 keras/models/__init__.py,sha256=83pyA0pzytqin8JLV6FEbPreCb-V64ToebxFGrHsVdQ,501
-keras/ops/__init__.py,sha256=b95A91bWrAp3S61ui69zIwEJUMsFYVi90E5TfIX7MpE,15618
+keras/ops/__init__.py,sha256=Exo66cXhtlICdHcwCb9W1aY_kyKGmu_E0gx2g8arqAA,15667
 keras/ops/image/__init__.py,sha256=oM_PLh5Jk9OGfi1bbJcfWkjoq0Ye5JQG9a7v_KzDfoc,1034
 keras/ops/linalg/__init__.py,sha256=0ab6icK3yuIm4khSfAksGRFLEAJhaOu6gGgarau4iEQ,822
 keras/ops/nn/__init__.py,sha256=2eD8IlkfBrsmJjHpzsxMM3_058oGeZVgohdBd27iDnI,2992
-keras/ops/numpy/__init__.py,sha256=jiEp6-gAl22y9Qzz1HS4T3VzDBlw4VViiXti65xkeAM,9680
+keras/ops/numpy/__init__.py,sha256=Et9qtWaBOwKAOyJJcBnUjmu40MKJz6mRHtUXHdWdH-Q,9729
 keras/optimizers/__init__.py,sha256=1fx0vEB-oGu-9dumxoIvX4qVHdgJvf74OLyYoBkE2y0,1267
 keras/optimizers/legacy/__init__.py,sha256=uIMQESCV80Q0FY-9ikQUjXYPyZqmTfAM3dfohQ5DzYs,516
 keras/optimizers/schedules/__init__.py,sha256=pQF3rQiAPuUSTUdflTr-fpL77oyGIv9xzGdjae3M3kw,1120
@@ -128,7 +128,7 @@ keras/regularizers/__init__.py,sha256=542Shphw7W8h4Dyf2rmqMKUECVZ8IVBvN9g1LWhz-b
 keras/saving/__init__.py,sha256=KvL2GZxjvgFgEhvEnkvqjIR9JSNHKz-NWZacXajsjLI,1298
 keras/src/__init__.py,sha256=Gi4S7EiCMkE03PbdGNpFdaUYySWDs_FcAJ8Taz9Y1BE,684
 keras/src/api_export.py,sha256=gXOkBOnmscV013WAc75lc4Up01-Kkg9EylIAT_QWctg,1173
-keras/src/version.py,sha256=Vic_aSfTjaFpUnuLrDlE8M6IpjKMn3-PBqqD8ygIOls,204
+keras/src/version.py,sha256=zrwbXx271qaOFPQlnpEWKXzwfgQbyWOQOcBlTEB9byM,204
 keras/src/activations/__init__.py,sha256=0nL3IFDB9unlrMz8ninKOWo-uCHasTUpTo1tXZb2u44,4433
 keras/src/activations/activations.py,sha256=mogPggtp4CGldI3VOPNmesRxp6EbiR1_i4KLGaVwzL8,17614
 keras/src/applications/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -171,7 +171,7 @@ keras/src/backend/jax/layer.py,sha256=o6CicT06udwamTRQIjNSDLZLyYHFzBXNbxewXgWe0i
 keras/src/backend/jax/linalg.py,sha256=LDaLZYz49ChE2kJR3YpaM9xuwusvd3krV7nNAAazTWA,2642
 keras/src/backend/jax/math.py,sha256=1IEDpdoF8e5ltu3D4wbDQuihzvJHhMXz8W9Z_E-eJqU,9391
 keras/src/backend/jax/nn.py,sha256=mrRawNvf9EWe8rdTwK_Auz6xdLkVG6hH0nIAP7hyUDE,60271
-keras/src/backend/jax/numpy.py,sha256=5C-obBCsAdY288BhjtxDIccqXDDle5eaP2yt7jfeUy8,38869
+keras/src/backend/jax/numpy.py,sha256=e-EU_q5qbWL4tQAmuXgLTzBtAReMbkJ_lHtzmP2J634,38997
 keras/src/backend/jax/optimizer.py,sha256=5DeXQHcYmUI6F9i1m1VHn3sBt4LEStOeBXnKdESevLM,4134
 keras/src/backend/jax/random.py,sha256=Uk2huGIk_dlzMrx5eDVrrr2TeCEMitn2vr4yzA0NXjs,3594
 keras/src/backend/jax/rnn.py,sha256=Ycq0qfLY4M4jhltvztpLQyywjEM17T7CZQFh4hhHOUE,7767
@@ -186,7 +186,7 @@ keras/src/backend/numpy/layer.py,sha256=dTk7W7ql7vRgll7JbOXK5PlIhQw5VHdpSjKciHd8
 keras/src/backend/numpy/linalg.py,sha256=uzLTxEyuX_gDcnoA5Q59GdTg33py0WooKK5T6T9Td6c,2543
 keras/src/backend/numpy/math.py,sha256=HdkEA5ro7dtQBTP78GFIgqTFLgNQ49PXHhqI1vLRGfo,10169
 keras/src/backend/numpy/nn.py,sha256=P9JAnTlwSTI7bVv8WIv1pDQJHpjML_WJ0RsJWy-LJMc,46200
-keras/src/backend/numpy/numpy.py,sha256=W2P2A1_Y54xR07xmR-T4ALrqJA_SliDwmoOALeI37P0,38070
+keras/src/backend/numpy/numpy.py,sha256=o71x6rkJgj1Mq2e0iJSIDq9v-xs_PFgskBioLc-_5cs,38168
 keras/src/backend/numpy/random.py,sha256=wx2nE75q7L2cBMjtQlQx8yKMj4Ie3puFMDQsbrZO8SA,3961
 keras/src/backend/numpy/rnn.py,sha256=thOsMung1qR3lQsR4_D6hqKMFollQgrB0KwsJLk4BMY,7867
 keras/src/backend/numpy/trainer.py,sha256=MzWr8_LLHa1P6fxdUWirGw_lQwHGF_vkZ7RUGLUzjUs,11126
@@ -198,7 +198,7 @@ keras/src/backend/openvino/layer.py,sha256=5RdvaH1yOyPAphjKiuQAK1H_yZFYKE1Hp7c5b
 keras/src/backend/openvino/linalg.py,sha256=L6a4MFGND2wWzPVCh44cwuOgkcC4wJTo8Xy3HwW04lg,1614
 keras/src/backend/openvino/math.py,sha256=qw9kX2sJ2qr0dBJF12Ey0E2GcwixPUqoev6UcNra4NI,3944
 keras/src/backend/openvino/nn.py,sha256=zULPxdwVO7JDZUUtsuoEEPCLQ09ew8z8T6G_i_NEqrM,23741
-keras/src/backend/openvino/numpy.py,sha256=Dc3tdpAEmNN78c0Q0lI5utFfDWt-hZiH4IChE52UhOU,108532
+keras/src/backend/openvino/numpy.py,sha256=D1ALQlwjLfIUnWslmUbDtvmqCUF4Hy_zYoSPV_O1PIc,109841
 keras/src/backend/openvino/random.py,sha256=4hRUtIP6qJxO3Qy9uH1x6jSuJna3nWPdUf4x2QU8-ew,5575
 keras/src/backend/openvino/rnn.py,sha256=ErmuZLPSgG9qU-NfYPPvBZ6Ysy8k-fA4g19Vhqq7OVQ,866
 keras/src/backend/openvino/trainer.py,sha256=bMmtSALqydqdS6ke-5sYW5fgxZDshDH810p_C0xCRTg,9087
@@ -211,7 +211,7 @@ keras/src/backend/tensorflow/layer.py,sha256=69d40LwL4HhKRsCjj1VRpjfrQXXF8VV3vh0
 keras/src/backend/tensorflow/linalg.py,sha256=_lZVfdY1tFvrN7xwbt3INGoTR0yC5v-kI1Q0XppVibY,8773
 keras/src/backend/tensorflow/math.py,sha256=zTu_7Ff6B2Ro862z_xH0OCmIWbV74DjsO5UnfjYuOUQ,12370
 keras/src/backend/tensorflow/nn.py,sha256=6vtZHzUED6_blUPE1Tnc3GAxPpJ2ebxoaiMn80tTL9k,51328
-keras/src/backend/tensorflow/numpy.py,sha256=I5S0igFo2Mq3Q0SodRyNggip9F_gwWfch6TyvVbQj_E,105076
+keras/src/backend/tensorflow/numpy.py,sha256=j_EuTLDE8mgJSZuCt7yWHZUbvwQz3T-ZksSzCP3cl4s,105695
 keras/src/backend/tensorflow/optimizer.py,sha256=kFlyEOnGjEYdLpd8mpwhUeku78__xBfZbbrDWpJrq60,9307
 keras/src/backend/tensorflow/random.py,sha256=iO8V_soaDXZm9ewyAVbjudhsMj08C348c9Bz64nxXC4,6475
 keras/src/backend/tensorflow/rnn.py,sha256=JbOSpt48cm612c7YwiTYOQCQsNXyI_6QeRhtUn8qEvM,34829
@@ -227,7 +227,7 @@ keras/src/backend/torch/layer.py,sha256=htECdpv9ioHWM8_zqQkEdxgDsgLu8XJi5yXgnLl-
 keras/src/backend/torch/linalg.py,sha256=wgPCfnscp5HOBmX9_-m-57lzxs1ttLNzmHqj2VYYq7k,2108
 keras/src/backend/torch/math.py,sha256=g-ElDii2Y_o1-t6BAu2nbS7JH-aPqVS5Fqds8aYzIlg,14324
 keras/src/backend/torch/nn.py,sha256=zmEzXEuwD7fVRDm145zsxzUDmqNmRgZS4LmeIx4Nbus,37498
-keras/src/backend/torch/numpy.py,sha256=zZDkUDmph1c_D0VOsSzkYjAj4TKln7laDxypVRBsZ6o,58072
+keras/src/backend/torch/numpy.py,sha256=JeMDIOubCyLqMqid1xg4CQ0Nm4gf5F7WLNggaiTviuE,58582
 keras/src/backend/torch/random.py,sha256=YhLfC7qkGpzlU_i6gGPVormo3BMSo7OUA3TC3GCehrA,8292
 keras/src/backend/torch/rnn.py,sha256=MJIVbHKsUA2dZm4Gu2NvRxlrFCWeWSxSZRmFxSsC3Zg,26041
 keras/src/backend/torch/trainer.py,sha256=dcikz1c5O0FHNzRKSi6WhIHsHfLV2HDlrXPElSd1cgE,17985
@@ -295,7 +295,7 @@ keras/src/initializers/initializer.py,sha256=kNAyRA8CzBdtknT6ZUt5XIO2_Z9NzpN119C
 keras/src/initializers/random_initializers.py,sha256=AuUeQ3YZGakDKTCs8njQLhozE6iWYHwP6-VstnEMOaQ,23631
 keras/src/layers/__init__.py,sha256=s7jrOesk0YMUKCxe5BTdQ5cxqrnkYbA-GWRoCXuqpsg,12103
 keras/src/layers/input_spec.py,sha256=cjBUBmgdneJfhvbI-WLqSapJInCsxliWBygyfMWgkj4,10010
-keras/src/layers/layer.py,sha256=Nbs9ke8ecAhTffiHyZ2cJUIt-3yaJb5fcjXKJAnOCHE,79634
+keras/src/layers/layer.py,sha256=uBgdpYjGcdvjAFN4hjd-li7A4UM5Xw3Z-WHm0FdrqvM,80143
 keras/src/layers/activations/__init__.py,sha256=MhPBye8WWLSf_iDel3BuuqYk4nx6Sym8s4dZKb1KTqQ,272
 keras/src/layers/activations/activation.py,sha256=c_Q5gUjCTD70a9-I1m5eEPcrWPpE-5iAlkDMt4lxRgA,1287
 keras/src/layers/activations/elu.py,sha256=jtszCDe6Cs_L3jITK3ascKouqgYUxdbGvT60kxQbcHM,840
@@ -389,34 +389,34 @@ keras/src/layers/preprocessing/stft_spectrogram.py,sha256=D92Gsbx4chANl2xLPXBCSK
 keras/src/layers/preprocessing/string_lookup.py,sha256=OIkPV7DZbX8rMf2J95bPBoFcaxso7_1yDnpjBJFIZ4M,18495
 keras/src/layers/preprocessing/text_vectorization.py,sha256=p1uubjplFyPo5yOnNJXtG9Vg0GJMQTJucUGljf3FROM,28161
 keras/src/layers/preprocessing/image_preprocessing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-keras/src/layers/preprocessing/image_preprocessing/aug_mix.py,sha256=spvWYUG6GcPrYZgedaE8LIwTbYE2yvPg2Hwao9UAang,11221
+keras/src/layers/preprocessing/image_preprocessing/aug_mix.py,sha256=Z6mpMF8QYP3gbfdeWaM6Rw6rzyBkK7MP369SJOnigCQ,11627
 keras/src/layers/preprocessing/image_preprocessing/auto_contrast.py,sha256=gY7hmXXVTO15dswR8ISf9h_gox4zDSDih2owjzb7WmE,3930
-keras/src/layers/preprocessing/image_preprocessing/base_image_preprocessing_layer.py,sha256=Ga1Wewc0Pl9uLGUp3x6dxS2j4Lh-1o7TaOtxxo9kf5o,13853
+keras/src/layers/preprocessing/image_preprocessing/base_image_preprocessing_layer.py,sha256=cn_ieMUfz884kCAmAstdZtNcaO4vtJ9n4Q7uD-TR8XA,15249
 keras/src/layers/preprocessing/image_preprocessing/center_crop.py,sha256=Pi9GlYTo7kZbfZpfF1FUwkwi0y9v8PcQYQAurixHaeU,9965
-keras/src/layers/preprocessing/image_preprocessing/cut_mix.py,sha256=reDSKzm15J7TR5TLrx92mWE-os2H6X0jY2Pd_ra_i_E,7877
+keras/src/layers/preprocessing/image_preprocessing/cut_mix.py,sha256=v3emau6I3pbOMCmaOk0k3HS3XNT2BkL9HM0sx0UT-EI,8267
 keras/src/layers/preprocessing/image_preprocessing/equalization.py,sha256=Q6URzVSxTxcd166oNFJsVlNO3x8EUMS0plqthDwKzu4,8659
-keras/src/layers/preprocessing/image_preprocessing/max_num_bounding_box.py,sha256=BTQaWjx-bMnwtsQDQLmeohs_VQECu1WZzPmi2PkDYHs,3435
+keras/src/layers/preprocessing/image_preprocessing/max_num_bounding_box.py,sha256=szo2mxzLWbHfMDcNpKjg_7q-xPc8aZjLHRxp-DG8bEk,3938
 keras/src/layers/preprocessing/image_preprocessing/mix_up.py,sha256=wQOq7pmMUmUPUsYyoORkCKzxEZJGRssi5kM7Y5RIwbo,6651
-keras/src/layers/preprocessing/image_preprocessing/rand_augment.py,sha256=upDdEgg4IXIGH-jTqjOabHXq8X84g-OtmTbnrFk08ew,8893
+keras/src/layers/preprocessing/image_preprocessing/rand_augment.py,sha256=qfxMaJeLE_7QlMSq1Mjw_1pK59bFZylWRp-GfsqFtlk,9328
 keras/src/layers/preprocessing/image_preprocessing/random_brightness.py,sha256=Ix01T1xsbf_QknyWcSlK1SxVPvFNtHw20xmWHhuQPZI,6083
-keras/src/layers/preprocessing/image_preprocessing/random_color_degeneration.py,sha256=N6rCXPhWCEh-xWqC9ETYwrbJ2f6lIqyCR9Z18uV3xd0,4896
-keras/src/layers/preprocessing/image_preprocessing/random_color_jitter.py,sha256=rbQvLhCPPXyAaYfcMiVzyN0yvfFrcfbRbkVruO9o38U,9464
-keras/src/layers/preprocessing/image_preprocessing/random_contrast.py,sha256=eJ7aakES1YfSv1JXjv8ZT3ltTqgG6Oo1_XU6BopKDng,5470
-keras/src/layers/preprocessing/image_preprocessing/random_crop.py,sha256=y2iHw-xbSV11uK4D34VT9QEkpvKOk-D-TmVSCZUjDn0,10553
-keras/src/layers/preprocessing/image_preprocessing/random_elastic_transform.py,sha256=fIfPe-906LUhTUDpiuPwM5oEOJ_1UQ9BhMHBFpItcGM,10208
-keras/src/layers/preprocessing/image_preprocessing/random_erasing.py,sha256=O7f44V805Wta9RMZyks4sl-LViglTCdp7_n-qj_nWbI,11233
-keras/src/layers/preprocessing/image_preprocessing/random_flip.py,sha256=Lmnbm-RX58fw34n55rjMYcnuJkg1B7xqG8_L2dYVMOQ,8057
-keras/src/layers/preprocessing/image_preprocessing/random_gaussian_blur.py,sha256=8QHtZvMEayvi22iPbqBCJriZ8lwLMKtM5LoupJVziak,7713
-keras/src/layers/preprocessing/image_preprocessing/random_grayscale.py,sha256=6yfwMky9QJxEmF1lW-RICPq9nLT0fcfPnlIc-jreOQI,4525
+keras/src/layers/preprocessing/image_preprocessing/random_color_degeneration.py,sha256=okHRWZzIpcN20X19FOl4gibunk2NywRIjBKbysdloGc,5351
+keras/src/layers/preprocessing/image_preprocessing/random_color_jitter.py,sha256=gyDuZjvu0MS1czEd2UgnMr7JRBnv7pY7I4JIGlrc3Bs,9901
+keras/src/layers/preprocessing/image_preprocessing/random_contrast.py,sha256=tVNj7ZymKvjk8627LiohGr6D-2vqAwysD4x6R0yN2aI,5898
+keras/src/layers/preprocessing/image_preprocessing/random_crop.py,sha256=sCyUCTEllkcHpMW5d2bsrCjtrXHnBTP--NDLrXbHOsw,10985
+keras/src/layers/preprocessing/image_preprocessing/random_elastic_transform.py,sha256=Qr7NTxZo64qIA0eggbtdEaO5F3ZptTVb2bBd_6pncXE,10675
+keras/src/layers/preprocessing/image_preprocessing/random_erasing.py,sha256=SJGh_uzZ5cA8QgGCLnRWKiigwD_NZg_3LVztq4UXeBY,11658
+keras/src/layers/preprocessing/image_preprocessing/random_flip.py,sha256=x5gmcmIaTekD2s3BcQgQA3hd2RQO8scLkxe-u9KQ2A8,8489
+keras/src/layers/preprocessing/image_preprocessing/random_gaussian_blur.py,sha256=PRjcmmXeJymHip_tbs5sQOB6_2hrqWKcHrWcon-9cyA,8153
+keras/src/layers/preprocessing/image_preprocessing/random_grayscale.py,sha256=AQtfK7glY8n5dCId2KFlESd9zXYxxUPIG99inYh8Sh0,4956
 keras/src/layers/preprocessing/image_preprocessing/random_hue.py,sha256=wL9Nc3sogNs-zjXtuW55jqqcVE69fTRQo8HNJ-oZpVI,6466
-keras/src/layers/preprocessing/image_preprocessing/random_invert.py,sha256=Y_yPzcf9H9rUgk6G9_XMNTqdwVKNBN8xvLygfwQVOkU,4371
-keras/src/layers/preprocessing/image_preprocessing/random_perspective.py,sha256=SZofshsQk_QDprFi0Sb1coaabWvaWk07mOC0x8ePjIU,11747
-keras/src/layers/preprocessing/image_preprocessing/random_posterization.py,sha256=msyMgi7Gi_dHpfBkOFadcMHL8dW4iCzxR-neh3iRc4Y,5167
-keras/src/layers/preprocessing/image_preprocessing/random_rotation.py,sha256=pKVg1Jvqu1zJhM3ewnvFyC8plHJCKkaCDD2hp1bRTMg,9635
+keras/src/layers/preprocessing/image_preprocessing/random_invert.py,sha256=6FcvSFiaydAbUoMJ-pQT1UJq5l_a9DLk8qLGsiVF7ew,4793
+keras/src/layers/preprocessing/image_preprocessing/random_perspective.py,sha256=_mEAGNeyYOdGTI7quUZ82thlSxFdA_TsDuL9c4jqWpg,12199
+keras/src/layers/preprocessing/image_preprocessing/random_posterization.py,sha256=riEGot-yz9BZhYjN_FsJYzKNGpLnJIZnqL8wzLlCAwM,5610
+keras/src/layers/preprocessing/image_preprocessing/random_rotation.py,sha256=OhtkfgmOcQjFAWlGcfw7VdiACe6ovMhsY99qG0LgVUU,10079
 keras/src/layers/preprocessing/image_preprocessing/random_saturation.py,sha256=ShsqSjkazbg9kJU8e3k6ko2ytN5ZCdAA_Ol9ku4uwOs,6105
-keras/src/layers/preprocessing/image_preprocessing/random_sharpness.py,sha256=4RGEEtorNlOyQXNvXMga08TxsaynayD6Ksu-eAzkm8U,6152
-keras/src/layers/preprocessing/image_preprocessing/random_shear.py,sha256=uEr1iCCAHdpIhAVz2VZh7u82NEYtiM9eMIhvvIQyA9A,15020
-keras/src/layers/preprocessing/image_preprocessing/random_translation.py,sha256=1l1Oufpsu54SunSrrBb-nq6cM9ANehgHJxWx40sTPig,14932
+keras/src/layers/preprocessing/image_preprocessing/random_sharpness.py,sha256=Hh7fBYdxedJdsod4y0gnFJSsuL-7AaUqzxEGwamjlhU,6583
+keras/src/layers/preprocessing/image_preprocessing/random_shear.py,sha256=2KLTCjDdPc_rJvC2IQm45OMsqogp8Ep3-P_1MDtAOvc,15455
+keras/src/layers/preprocessing/image_preprocessing/random_translation.py,sha256=W7ctCMjKXSl7nD7Lva6j-1brD-Drj_ZheNMq5k6UOf4,15385
 keras/src/layers/preprocessing/image_preprocessing/random_zoom.py,sha256=DBDSep-CGk-lsWP0gwP89SQR2k8-ZjYqKKj0rf-KWWA,16472
 keras/src/layers/preprocessing/image_preprocessing/resizing.py,sha256=N3_Mw4KA-DC7R0zBNeRnCQWvzKa8Bpg1jRooUJSZLq0,12241
 keras/src/layers/preprocessing/image_preprocessing/solarization.py,sha256=URBAHjCIRs8mlb1RCt39pHtylRgZuhxC7kFtACsGIbc,8015
@@ -504,7 +504,7 @@ keras/src/ops/linalg.py,sha256=3V8S_cgNxZZCIFcFj-FBHTdRqWNbimDtumMvfoc0f30,26736
 keras/src/ops/math.py,sha256=4qYMJ5qAPmeSyeF63YWoGbUkQt6f4_VX0enOChU4mXU,37233
 keras/src/ops/nn.py,sha256=04gjHB2BWusy4tWm59EO5Ns1paJC5umDNGwNCKzaJWQ,104658
 keras/src/ops/node.py,sha256=aJgn9D-GkteE--Bbt2cZ9JjVxb2W2uS1OWEKoeLsl3Y,5583
-keras/src/ops/numpy.py,sha256=c5jXbWiE5jrGh1AteL3XsSgs1wNrpNUKxmTdThpNh-0,259129
+keras/src/ops/numpy.py,sha256=6-nCfjwd4y0oWLoL72ZTmyDu-kNLWdNlL4KDr6TsqC8,260893
 keras/src/ops/operation.py,sha256=A7sh9Hi6kZb7wkeMmhrDQIq770ofANXuP-Qg-kwCM3o,15485
 keras/src/ops/operation_utils.py,sha256=C6eThl-haKzlDH0fC1rn5-P1P-pCfIfXs-fy-ADR534,14523
 keras/src/ops/symbolic_arguments.py,sha256=MKwXxZYkyouD9BPmQ1uUNxILdcwPvTayAqXaUV3P3o4,1628
@@ -541,7 +541,7 @@ keras/src/random/__init__.py,sha256=BmXVYPzxbhADohoLtAEEzB3cesP7YBFDsp1qc6BWWlg,
 keras/src/random/random.py,sha256=bUADZIVDuCghwIWTk0qBxXTxUdiNGWIdsRi8QJ3ePg4,17581
 keras/src/random/seed_generator.py,sha256=-a0CQa7--Xt0g0nfdjLmUzlFElY9Y838VcCx05AcllY,5655
 keras/src/regularizers/__init__.py,sha256=GzK9FTKL2Xxd5H55GfG9gxDqt4eZoVHFWICgb2VW8qM,1731
-keras/src/regularizers/regularizers.py,sha256=urXNmMGuqHT7lOmS-yQPl3At3Ny-37Xlo389ErCg84A,11799
+keras/src/regularizers/regularizers.py,sha256=MDtsiFjLgI1sl9z036XcQhZH9OnUmMHM74l27dspum0,11802
 keras/src/saving/__init__.py,sha256=vnrtfvnzW7Gwtxe5COhaMoEnVYB5iDe2YlqJ-DvqFIk,614
 keras/src/saving/file_editor.py,sha256=tsUo9mQbMa8433tHTnOKWFhDeathYwDb0CeWcDTTTBQ,32089
 keras/src/saving/keras_saveable.py,sha256=aGIt1ajtsaamfUq18LM6ql8JEoQzi3HwzJEuwQ9bmKE,1285
@@ -618,7 +618,7 @@ keras/utils/bounding_boxes/__init__.py,sha256=jtvQll4u8ZY0Z96HwNhP1nxWEG9FM3gI-6
 keras/utils/legacy/__init__.py,sha256=oSYZz6uS8UxSElRaaJYWJEoweJ4GAasZjnn7fNaOlog,342
 keras/visualization/__init__.py,sha256=UKWmiy6sps4SWlmQi9WX8_Z53cPpLlphz2zIeHdwJpQ,722
 keras/wrappers/__init__.py,sha256=QkS-O5K8qGS7C3sytF8MpmO6PasATpNVGF8qtb7Ojsw,407
-keras_nightly-3.14.0.dev2026012204.dist-info/METADATA,sha256=FPFm1FPTR_fMzfJqiBWR_IOX_YMPs8xoUhjn4gCxO_I,6339
-keras_nightly-3.14.0.dev2026012204.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
-keras_nightly-3.14.0.dev2026012204.dist-info/top_level.txt,sha256=ptcw_-QuGZ4ZDjMdwi_Z0clZm8QAqFdvzzFnDEOTs9o,6
-keras_nightly-3.14.0.dev2026012204.dist-info/RECORD,,
+keras_nightly-3.14.0.dev2026012304.dist-info/METADATA,sha256=WLjmq7_YlxrYTWWRdBTiOboC4zCj0j5pKexkiZZ6C6Y,6339
+keras_nightly-3.14.0.dev2026012304.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
+keras_nightly-3.14.0.dev2026012304.dist-info/top_level.txt,sha256=ptcw_-QuGZ4ZDjMdwi_Z0clZm8QAqFdvzzFnDEOTs9o,6
+keras_nightly-3.14.0.dev2026012304.dist-info/RECORD,,