tf-keras-nightly 2.19.0.dev2025010810__py3-none-any.whl → 2.19.0.dev2025011010__py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
tf_keras/__init__.py CHANGED
@@ -27,4 +27,4 @@ from tf_keras.src.engine.sequential import Sequential
 from tf_keras.src.engine.training import Model
 
 
-__version__ = "2.19.0.dev2025010810"
+__version__ = "2.19.0.dev2025011010"
tf_keras/src/datasets/boston_housing.py CHANGED
@@ -14,6 +14,8 @@
 # ==============================================================================
 """Boston housing price regression dataset."""
 
+import os
+
 import numpy as np
 
 from tf_keras.src.utils.data_utils import get_file
@@ -23,7 +25,9 @@ from tensorflow.python.util.tf_export import keras_export
 
 
 @keras_export("keras.datasets.boston_housing.load_data")
-def load_data(path="boston_housing.npz", test_split=0.2, seed=113):
+def load_data(
+    path="boston_housing.npz", test_split=0.2, seed=113, cache_dir=None
+):
     """Loads the Boston Housing dataset.
 
     This is a dataset taken from the StatLib library which is maintained at
@@ -43,11 +47,12 @@ def load_data(path="boston_housing.npz", test_split=0.2, seed=113):
     [StatLib website](http://lib.stat.cmu.edu/datasets/boston).
 
     Args:
-        path: path where to cache the dataset locally
-            (relative to `~/.keras/datasets`).
+        path: path where to cache the dataset locally (relative to
+            `~/.keras/datasets`).
         test_split: fraction of the data to reserve as test set.
-        seed: Random seed for shuffling the data
-            before computing the test split.
+        seed: Random seed for shuffling the data before computing the test split.
+        cache_dir: directory where to cache the dataset locally. When None,
+            defaults to `~/.keras/datasets`.
 
     Returns:
         Tuple of Numpy arrays: `(x_train, y_train), (x_test, y_test)`.
@@ -64,12 +69,16 @@ def load_data(path="boston_housing.npz", test_split=0.2, seed=113):
     origin_folder = (
         "https://storage.googleapis.com/tensorflow/tf-keras-datasets/"
     )
+    if cache_dir:
+        cache_dir = os.path.expanduser(cache_dir)
+        os.makedirs(cache_dir, exist_ok=True)
     path = get_file(
         path,
         origin=origin_folder + "boston_housing.npz",
         file_hash=(  # noqa: E501
             "f553886a1f8d56431e820c5b82552d9d95cfcb96d1e678153f8839538947dff5"
         ),
+        cache_dir=cache_dir,
     )
     with np.load(path, allow_pickle=True) as f:
         x = f["x"]
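Every dataset loader touched in this release gains the same optional cache_dir argument. A minimal usage sketch, assuming only the new signature shown above (the cache path is an arbitrary example, not a package default; files land in a datasets/ subfolder under it, per get_file's default cache_subdir):

    from tf_keras.datasets import boston_housing

    # Downloads go under the given directory instead of ~/.keras/datasets.
    (x_train, y_train), (x_test, y_test) = boston_housing.load_data(
        cache_dir="/tmp/keras-cache"
    )
    print(x_train.shape, x_test.shape)  # (404, 13) and (102, 13) at the default 0.2 split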
tf_keras/src/datasets/cifar10.py CHANGED
@@ -27,7 +27,7 @@ from tensorflow.python.util.tf_export import keras_export
 
 
 @keras_export("keras.datasets.cifar10.load_data")
-def load_data():
+def load_data(cache_dir=None):
     """Loads the CIFAR10 dataset.
 
     This is a dataset of 50,000 32x32 color training images and 10,000 test
@@ -49,6 +49,10 @@ def load_data():
     | 8 | ship |
     | 9 | truck |
 
+    Args:
+        cache_dir: directory where to cache the dataset locally. When None,
+            defaults to `~/.keras/datasets`.
+
     Returns:
         Tuple of NumPy arrays: `(x_train, y_train), (x_test, y_test)`.
 
@@ -78,6 +82,9 @@ def load_data():
     """
     dirname = "cifar-10-batches-py"
     origin = "https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz"
+    if cache_dir:
+        cache_dir = os.path.expanduser(cache_dir)
+        os.makedirs(cache_dir, exist_ok=True)
     path = get_file(
         dirname,
         origin=origin,
@@ -85,6 +92,7 @@
         file_hash=(  # noqa: E501
             "6d958be074577803d12ecdefd02955f39262c83c16fe9348329d7fe0b5c001ce"
         ),
+        cache_dir=cache_dir,
     )
 
     num_train_samples = 50000
tf_keras/src/datasets/cifar100.py CHANGED
@@ -27,7 +27,7 @@ from tensorflow.python.util.tf_export import keras_export
 
 
 @keras_export("keras.datasets.cifar100.load_data")
-def load_data(label_mode="fine"):
+def load_data(label_mode="fine", cache_dir=None):
     """Loads the CIFAR100 dataset.
 
     This is a dataset of 50,000 32x32 color training images and
@@ -39,6 +39,8 @@ def load_data(label_mode="fine"):
         label_mode: one of "fine", "coarse". If it is "fine" the category labels
             are the fine-grained labels, if it is "coarse" the output labels are the
             coarse-grained superclasses.
+        cache_dir: directory where to cache the dataset locally. When None,
+            defaults to `~/.keras/datasets`.
 
     Returns:
         Tuple of NumPy arrays: `(x_train, y_train), (x_test, y_test)`.
@@ -75,6 +77,9 @@ def load_data(label_mode="fine"):
 
     dirname = "cifar-100-python"
     origin = "https://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz"
+    if cache_dir:
+        cache_dir = os.path.expanduser(cache_dir)
+        os.makedirs(cache_dir, exist_ok=True)
     path = get_file(
         dirname,
         origin=origin,
@@ -82,6 +87,7 @@ def load_data(label_mode="fine"):
         file_hash=(  # noqa: E501
             "85cd44d02ba6437773c5bbd22e183051d648de2e7d6b014e1ef29b855ba677a7"
         ),
+        cache_dir=cache_dir,
     )
 
     fpath = os.path.join(path, "train")
tf_keras/src/datasets/fashion_mnist.py CHANGED
@@ -26,7 +26,7 @@ from tensorflow.python.util.tf_export import keras_export
 
 
 @keras_export("keras.datasets.fashion_mnist.load_data")
-def load_data():
+def load_data(cache_dir=None):
     """Loads the Fashion-MNIST dataset.
 
     This is a dataset of 60,000 28x28 grayscale images of 10 fashion categories,
@@ -48,6 +48,10 @@ def load_data():
     | 8 | Bag |
     | 9 | Ankle boot |
 
+    Args:
+        cache_dir: directory where to cache the dataset locally. When None,
+            defaults to `~/.keras/datasets`.
+
     Returns:
         Tuple of NumPy arrays: `(x_train, y_train), (x_test, y_test)`.
 
@@ -77,7 +81,6 @@ def load_data():
     The copyright for Fashion-MNIST is held by Zalando SE.
     Fashion-MNIST is licensed under the [MIT license](
     https://github.com/zalandoresearch/fashion-mnist/blob/master/LICENSE).
-
     """
     dirname = os.path.join("datasets", "fashion-mnist")
     base = "https://storage.googleapis.com/tensorflow/tf-keras-datasets/"
@@ -87,10 +90,19 @@ def load_data():
         "t10k-labels-idx1-ubyte.gz",
         "t10k-images-idx3-ubyte.gz",
     ]
-
+    if cache_dir:
+        cache_dir = os.path.expanduser(cache_dir)
+        os.makedirs(cache_dir, exist_ok=True)
     paths = []
     for fname in files:
-        paths.append(get_file(fname, origin=base + fname, cache_subdir=dirname))
+        paths.append(
+            get_file(
+                fname,
+                origin=base + fname,
+                cache_dir=cache_dir,
+                cache_subdir=dirname,
+            )
+        )
 
     with gzip.open(paths[0], "rb") as lbpath:
         y_train = np.frombuffer(lbpath.read(), np.uint8, offset=8)
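fashion_mnist is the only loader here that also passes cache_subdir, so a custom cache_dir composes with it: get_file joins cache_subdir under cache_dir. A short sketch of the expected layout, assuming get_file's documented join behavior (the base path is illustrative):

    import os

    cache_dir = "/data/cache"                            # hypothetical user-supplied base
    dirname = os.path.join("datasets", "fashion-mnist")  # cache_subdir from the code above
    fname = "train-images-idx3-ubyte.gz"
    print(os.path.join(os.path.expanduser(cache_dir), dirname, fname))
    # /data/cache/datasets/fashion-mnist/train-images-idx3-ubyte.gz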
tf_keras/src/datasets/imdb.py CHANGED
@@ -15,6 +15,7 @@
 """IMDB sentiment classification dataset."""
 
 import json
+import os
 
 import numpy as np
 
@@ -36,6 +37,7 @@ def load_data(
     start_char=1,
     oov_char=2,
     index_from=3,
+    cache_dir=None,
     **kwargs,
 ):
     """Loads the [IMDB dataset](https://ai.stanford.edu/~amaas/data/sentiment/).
@@ -73,6 +75,8 @@ def load_data(
             Words that were cut out because of the `num_words` or
             `skip_top` limits will be replaced with this character.
         index_from: int. Index actual words with this index and higher.
+        cache_dir: directory where to cache the dataset locally. When None,
+            defaults to `~/.keras/datasets`.
         **kwargs: Used for backwards compatibility.
 
     Returns:
@@ -108,12 +112,16 @@ def load_data(
     origin_folder = (
         "https://storage.googleapis.com/tensorflow/tf-keras-datasets/"
     )
+    if cache_dir:
+        cache_dir = os.path.expanduser(cache_dir)
+        os.makedirs(cache_dir, exist_ok=True)
     path = get_file(
         path,
         origin=origin_folder + "imdb.npz",
         file_hash=(  # noqa: E501
             "69664113be75683a8fe16e3ed0ab59fda8886cb3cd7ada244f7d9544e4676b9f"
         ),
+        cache_dir=cache_dir,
     )
     with np.load(path, allow_pickle=True) as f:
         x_train, labels_train = f["x_train"], f["y_train"]
tf_keras/src/datasets/mnist.py CHANGED
@@ -13,6 +13,7 @@
 # limitations under the License.
 # ==============================================================================
 """MNIST handwritten digits dataset."""
+import os
 
 import numpy as np
 
@@ -23,7 +24,7 @@ from tensorflow.python.util.tf_export import keras_export
 
 
 @keras_export("keras.datasets.mnist.load_data")
-def load_data(path="mnist.npz"):
+def load_data(path="mnist.npz", cache_dir=None):
     """Loads the MNIST dataset.
 
     This is a dataset of 60,000 28x28 grayscale images of the 10 digits,
@@ -32,8 +33,9 @@ def load_data(path="mnist.npz"):
     [MNIST homepage](http://yann.lecun.com/exdb/mnist/).
 
     Args:
-        path: path where to cache the dataset locally
-            (relative to `~/.keras/datasets`).
+        path: path where to cache the dataset locally relative to cache_dir.
+        cache_dir: dir location where to cache the dataset locally. When None,
+            defaults to `~/.keras/datasets`.
 
     Returns:
         Tuple of NumPy arrays: `(x_train, y_train), (x_test, y_test)`.
@@ -72,12 +74,16 @@ def load_data(path="mnist.npz"):
     origin_folder = (
         "https://storage.googleapis.com/tensorflow/tf-keras-datasets/"
     )
+    if cache_dir:
+        cache_dir = os.path.expanduser(cache_dir)
+        os.makedirs(cache_dir, exist_ok=True)
     path = get_file(
         path,
         origin=origin_folder + "mnist.npz",
         file_hash=(  # noqa: E501
             "731c5ac602752760c8e48fbffcf8c3b850d9dc2a2aedcf2cc48468fc17b673d1"
         ),
+        cache_dir=cache_dir,
     )
     with np.load(path, allow_pickle=True) as f:
         x_train, y_train = f["x_train"], f["y_train"]
tf_keras/src/datasets/reuters.py CHANGED
@@ -15,6 +15,7 @@
 """Reuters topic classification dataset."""
 
 import json
+import os
 
 import numpy as np
 
@@ -37,6 +38,7 @@ def load_data(
     start_char=1,
     oov_char=2,
     index_from=3,
+    cache_dir=None,
     **kwargs,
 ):
     """Loads the Reuters newswire classification dataset.
@@ -83,6 +85,8 @@ def load_data(
             Words that were cut out because of the `num_words` or
             `skip_top` limits will be replaced with this character.
         index_from: int. Index actual words with this index and higher.
+        cache_dir: directory where to cache the dataset locally. When None,
+            defaults to `~/.keras/datasets`.
         **kwargs: Used for backwards compatibility.
 
     Returns:
@@ -114,12 +118,16 @@ def load_data(
     origin_folder = (
         "https://storage.googleapis.com/tensorflow/tf-keras-datasets/"
     )
+    if cache_dir:
+        cache_dir = os.path.expanduser(cache_dir)
+        os.makedirs(cache_dir, exist_ok=True)
     path = get_file(
         path,
         origin=origin_folder + "reuters.npz",
         file_hash=(  # noqa: E501
             "d6586e694ee56d7a4e65172e12b3e987c03096cb01eab99753921ef915959916"
         ),
+        cache_dir=cache_dir,
    )
     with np.load(path, allow_pickle=True) as f:
         xs, labels = f["x"], f["y"]
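Across all the loaders the diff repeats the same three-line normalization before get_file. Purely as a reading aid, the inline pattern is equivalent to a helper like the hypothetical _resolve_cache_dir below; the shipped code keeps the copies inline:

    import os

    def _resolve_cache_dir(cache_dir):
        # Hypothetical refactor, not part of the package: mirrors the inline
        # guard each load_data() now performs before calling get_file.
        if cache_dir:
            cache_dir = os.path.expanduser(cache_dir)  # expand a leading "~"
            os.makedirs(cache_dir, exist_ok=True)      # create the directory if missing
        return cache_dir  # None falls through; get_file then uses ~/.keras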
tf_keras_nightly-2.19.0.dev2025010810.dist-info/METADATA → tf_keras_nightly-2.19.0.dev2025011010.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: tf_keras-nightly
-Version: 2.19.0.dev2025010810
+Version: 2.19.0.dev2025011010
 Summary: Deep learning for humans.
 Home-page: https://keras.io/
 Download-URL: https://github.com/keras-team/tf-keras/tags
@@ -27,6 +27,17 @@ Classifier: Topic :: Software Development :: Libraries
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Requires-Python: >=3.9
 Requires-Dist: tf-nightly~=2.19.0.dev
+Dynamic: author
+Dynamic: author-email
+Dynamic: classifier
+Dynamic: description
+Dynamic: download-url
+Dynamic: home-page
+Dynamic: keywords
+Dynamic: license
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary
 
 TF-Keras is a deep learning API written in Python,
 running on top of the machine learning platform TensorFlow.
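The bump to Metadata-Version 2.2 and the new Dynamic: fields are most likely a byproduct of the setuptools upgrade recorded in the WHEEL file below (75.7.0 to 75.8.0): metadata 2.2 (PEP 643) declares fields whose values are computed at build time as Dynamic, so no dependency or classifier actually changed here.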
tf_keras_nightly-2.19.0.dev2025010810.dist-info/RECORD → tf_keras_nightly-2.19.0.dev2025011010.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
-tf_keras/__init__.py,sha256=Ic_RmKhdSiGU7KdY909ubNeO8FMSeGkUMItOhxQutoA,911
+tf_keras/__init__.py,sha256=U72ujZ_e9m_CHu00HtrjM3E2ohC71ZE57uG61bvZtuc,911
 tf_keras/__internal__/__init__.py,sha256=OHQbeIC0QtRBI7dgXaJaVbH8F00x8dCI-DvEcIfyMsE,671
 tf_keras/__internal__/backend/__init__.py,sha256=LnMs2A6685gDG79fxqmdulIYlVE_3WmXlBTBo9ZWYcw,162
 tf_keras/__internal__/layers/__init__.py,sha256=F5SGMhOTPzm-PR44VrfinURHcVeQPIEdwnZlAkSTB3A,176
@@ -239,14 +239,14 @@ tf_keras/src/benchmarks/benchmark_util.py,sha256=Hxepqy7JPolebZo5xxWE4UXZ7WmyAYg
 tf_keras/src/benchmarks/distribution_util.py,sha256=mbnbRlgDGzJTqhdQw4fRIPhT2JztDviJgXT86MfQEEc,6567
 tf_keras/src/benchmarks/model_memory_profile.py,sha256=9CMMHvW02qWFPN9lFNXY3w-rQfdLT5IQeWZ34K1v6fE,2248
 tf_keras/src/datasets/__init__.py,sha256=YSVzC5NDV0KgkQwLZqJgWHuVZRkXkAdEVKlRs73RFXo,51
-tf_keras/src/datasets/boston_housing.py,sha256=1s19TzPp3ku8GRIFdQhgPSbMMH0KyKnyLuZku6rDdu8,3391
+tf_keras/src/datasets/boston_housing.py,sha256=7mK_B-HjLdHBiQ-uGX6deQUpnYF3zPPTEBSWuxjIymc,3670
 tf_keras/src/datasets/cifar.py,sha256=mMMwDOf7IYGeVlLemhiA_RSXzSF3CuwFllGpokh-pKs,1394
-tf_keras/src/datasets/cifar10.py,sha256=uGMJJ4Yw10ebIkXI5H3V05DgiwWbvdMRGIP06NPLdr4,3767
-tf_keras/src/datasets/cifar100.py,sha256=CqrNuFCiN-AvTKKJoPUbiPWPZEXfpv79p-Ejg2HyMr4,3559
-tf_keras/src/datasets/fashion_mnist.py,sha256=w1pbePakOPc4wi5x4UOtD8s2zJ7SA6Lg13FNgc4im7Q,3635
-tf_keras/src/datasets/imdb.py,sha256=kHzblPv15KtmCaErMzvATfvoB5QOLfLhIqH8ZDtrlO8,8290
-tf_keras/src/datasets/mnist.py,sha256=TMPrS8AE6_qvEM_tmWhNwHZmnxwiiAOetxwFF1KNK-Q,3085
-tf_keras/src/datasets/reuters.py,sha256=iadpEM1tmfVwRtE9mdj1F8JVnFUATZAzPU8iDbdyfpw,8309
+tf_keras/src/datasets/cifar10.py,sha256=geRUHLtpFX8anDORscmrktlmfD_WVT6hW2SDVhzgx8U,4050
+tf_keras/src/datasets/cifar100.py,sha256=fa7hntiBxIny7qwhrm6pXl8dlh0SN1891s_QghAko1o,3833
+tf_keras/src/datasets/fashion_mnist.py,sha256=65VWDipkXL9QQCOpjdzyrFo3TLJL62iFjpxYsuL_jmw,4009
+tf_keras/src/datasets/imdb.py,sha256=sx35dNz7cBfvI8ZZetYvaE7zRg23j1gWWEFCBNzOzUA,8580
+tf_keras/src/datasets/mnist.py,sha256=sJe_BNuWNFS03hH5GWOYE6M258NXpxEcjKnhufEh7R0,3352
+tf_keras/src/datasets/reuters.py,sha256=_5TNTUzK3roIr1K3GLjR16QtJ4ch_FRYeEIJzM7hcB0,8599
 tf_keras/src/distribute/__init__.py,sha256=DbbsbJOIWEtjQiv6Stq1KEVvnoAK-E5-Zdkd2FhBZiI,734
 tf_keras/src/distribute/dataset_creator_model_fit_test_base.py,sha256=e2RnS8nnCKWZDReVKNWvxjIjVZhdPLMe0xh_ydHPqaM,8547
 tf_keras/src/distribute/distribute_coordinator_utils.py,sha256=WzJM0rkbxvs-ES2DvJhhf0bKpdG4fNqJBcxU-BrY6Es,29244
@@ -606,7 +606,7 @@ tf_keras/src/utils/legacy/__init__.py,sha256=EfMmeHYDzwvxNaktPhQbkTdcPSIGCqMhBND
 tf_keras/utils/__init__.py,sha256=b7_d-USe_EmLo02_P99Q1rUCzKBYayPCfiYFStP-0nw,2735
 tf_keras/utils/experimental/__init__.py,sha256=DzGogE2AosjxOVILQBT8PDDcqbWTc0wWnZRobCdpcec,97
 tf_keras/utils/legacy/__init__.py,sha256=7ujlDa5HeSRcth2NdqA0S1P2-VZF1kB3n68jye6Dj-8,189
-tf_keras_nightly-2.19.0.dev2025010810.dist-info/METADATA,sha256=97BHUyZGXXKN73i6DNcsVIFl6Om5L1xgJa4NwT5XhdQ,1637
-tf_keras_nightly-2.19.0.dev2025010810.dist-info/WHEEL,sha256=A3WOREP4zgxI0fKrHUG8DC8013e3dK3n7a6HDbcEIwE,91
-tf_keras_nightly-2.19.0.dev2025010810.dist-info/top_level.txt,sha256=LC8FK7zHDNKxB17C6lGKvrZ_fZZGJsRiBK23SfiDegY,9
-tf_keras_nightly-2.19.0.dev2025010810.dist-info/RECORD,,
+tf_keras_nightly-2.19.0.dev2025011010.dist-info/METADATA,sha256=T7YzIrf0nKuYtl_SjTePRd-PWj8-dqFTTCZFxUgT9GA,1857
+tf_keras_nightly-2.19.0.dev2025011010.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+tf_keras_nightly-2.19.0.dev2025011010.dist-info/top_level.txt,sha256=LC8FK7zHDNKxB17C6lGKvrZ_fZZGJsRiBK23SfiDegY,9
+tf_keras_nightly-2.19.0.dev2025011010.dist-info/RECORD,,
tf_keras_nightly-2.19.0.dev2025010810.dist-info/WHEEL → tf_keras_nightly-2.19.0.dev2025011010.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.7.0)
+Generator: setuptools (75.8.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 