tfds-nightly 4.9.9.dev202508180045__py3-none-any.whl → 4.9.9.dev202508200044__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tensorflow_datasets/core/constants.py +11 -4
- tensorflow_datasets/core/dataset_builder_test.py +7 -21
- tensorflow_datasets/core/dataset_builders/adhoc_builder.py +1 -0
- tensorflow_datasets/core/utils/docs.py +1 -3
- tensorflow_datasets/core/utils/file_utils.py +4 -16
- tensorflow_datasets/core/utils/file_utils_test.py +1 -1
- tensorflow_datasets/datasets/ble_wind_field/ble_wind_field_dataset_builder.py +0 -1
- tensorflow_datasets/datasets/multi_news/README.md +13 -8
- tensorflow_datasets/datasets/multi_news/TAGS.txt +6 -0
- tensorflow_datasets/datasets/multi_news/checksums.tsv +6 -6
- tensorflow_datasets/datasets/multi_news/multi_news_dataset_builder.py +9 -2
- tensorflow_datasets/public_api.py +1 -1
- tensorflow_datasets/scripts/cli/cli_utils.py +1 -1
- tensorflow_datasets/testing/__init__.py +2 -0
- tensorflow_datasets/testing/test_utils.py +11 -0
- {tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/METADATA +1 -1
- {tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/RECORD +22 -24
- tensorflow_datasets/scripts/documentation/build_api_docs.py +0 -91
- tensorflow_datasets/scripts/documentation/build_api_docs_test.py +0 -65
- {tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/WHEEL +0 -0
- {tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/entry_points.txt +0 -0
- {tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/licenses/AUTHORS +0 -0
- {tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/licenses/LICENSE +0 -0
- {tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/top_level.txt +0 -0
tensorflow_datasets/core/constants.py
@@ -28,10 +28,17 @@ SRC_BASE_URL = 'https://github.com/tensorflow/datasets/tree/master/'

 # Directory where to store processed datasets.
 # If modifying this, should also update `scripts/cli/build.py` `--data_dir`
-
-
-
-
+
+
+def get_default_data_dir() -> str:
+  """Returns the TFDS default data directory."""
+  return os.environ.get(
+      'TFDS_DATA_DIR',
+      os.path.join(os.path.expanduser('~'), 'tensorflow_datasets'),
+  )
+
+
+DATA_DIR: Final[str] = get_default_data_dir()

 # Prefix of files / directories which aren't finished downloading / extracting.
 INCOMPLETE_PREFIX = 'incomplete.'

tensorflow_datasets/core/dataset_builder_test.py
@@ -27,7 +27,6 @@ import numpy as np
 import pytest
 import tensorflow as tf
 from tensorflow_datasets import testing
-from tensorflow_datasets.core import constants
 from tensorflow_datasets.core import dataset_builder
 from tensorflow_datasets.core import dataset_info
 from tensorflow_datasets.core import dataset_utils

@@ -831,32 +830,19 @@ class DatasetBuilderMultiDirTest(testing.TestCase):

   @classmethod
   def setUpClass(cls):
-    super(
+    super().setUpClass()
     cls.builder = DummyDatasetSharedGenerator()

   def setUp(self):
-    super(
+    super().setUp()
     # Sanity check to make sure that no dir is registered
     file_utils.clear_registered_data_dirs()
-    # Create a new temp dir
-    self.other_data_dir = os.path.join(self.get_temp_dir(), "other_dir")
     # Overwrite the default data_dir (as files get created)
-
-
-
-
-
-  def tearDown(self):
-    super(DatasetBuilderMultiDirTest, self).tearDown()
-    # Restore to the default `_registered_data_dir`
-    file_utils._registered_data_dir = set()
-    # Clear-up existing dirs
-    if tf.io.gfile.exists(self.other_data_dir):
-      tf.io.gfile.rmtree(self.other_data_dir)
-    if tf.io.gfile.exists(self.default_data_dir):
-      tf.io.gfile.rmtree(self.default_data_dir)
-    # Restore the orgininal data dir
-    constants.DATA_DIR = self._original_data_dir
+    default_data_dir = self.enter_context(testing.mock_default_data_dir())
+    # Create a new temp dir
+    self.other_data_dir = os.path.join(
+        os.path.dirname(default_data_dir), "other_dir"
+    )

   def test_load_data_dir(self):
     """Ensure that `tfds.load` also supports multiple data_dir."""

tensorflow_datasets/core/dataset_builders/adhoc_builder.py
@@ -85,6 +85,7 @@ from typing import Any, Dict

 from absl import logging
 from etils import epath
+from tensorflow_datasets.core import constants
 from tensorflow_datasets.core import dataset_builder
 from tensorflow_datasets.core import dataset_info
 from tensorflow_datasets.core import dataset_utils

tensorflow_datasets/core/utils/docs.py
@@ -29,9 +29,7 @@ class A:
 ```

 The functions exposed below are dummy decorators. This allows not having to load
-TensorFlow.
-scripts/documentation/build_api_docs.py with actual TensorFlow documentation
-decorators.
+TensorFlow.
 """

 from typing import Any, TypeVar

tensorflow_datasets/core/utils/file_utils.py
@@ -156,23 +156,11 @@ def list_data_dirs(
     else:
       return [given_data_dir]
   else:
-    default_data_dir =
+    default_data_dir = Path(constants.DATA_DIR)
     all_data_dirs = _REGISTERED_DATA_DIRS | {default_data_dir}
     return sorted(d.expanduser() for d in all_data_dirs)


-def get_default_data_dir(given_data_dir: epath.PathLike | None = None) -> Path:
-  """Returns the default data_dir."""
-  if given_data_dir:
-    data_dir = os.path.expanduser(given_data_dir)
-  elif 'TFDS_DATA_DIR' in os.environ:
-    data_dir = os.environ['TFDS_DATA_DIR']
-  else:
-    data_dir = constants.DATA_DIR
-
-  return Path(data_dir)
-
-
 def get_dataset_dir(
     data_dir: epath.PathLike,
     builder_name: str,

@@ -189,11 +177,11 @@ def get_dataset_dir(


 def get_data_dir_and_dataset_dir(
-    given_data_dir:
+    given_data_dir: PathLike | None,
     builder_name: str,
     config_name: str | None,
     version: version_lib.Version | str | None,
-) -> tuple[
+) -> tuple[Path, Path]:
   """Returns the data and dataset directories for the given dataset.

   Args:

@@ -249,7 +237,7 @@ def get_data_dir_and_dataset_dir(
     return next(iter(dataset_dir_by_data_dir.items()))

   # No dataset found, use default directory
-  default_data_dir =
+  default_data_dir = Path(constants.DATA_DIR)
   dataset_dir = get_dataset_dir(
       data_dir=default_data_dir,
       builder_name=builder_name,

tensorflow_datasets/datasets/ble_wind_field/ble_wind_field_dataset_builder.py
@@ -76,7 +76,6 @@ class Builder(tfds.core.GeneratorBasedBuilder):
     zarr_array = zarr.open_array(
         store=gcsfs_store(f'{self.GCS_URL}/{self.GCS_FILENAME}'),
         mode='r',
-        synchronizer=zarr.ThreadSynchronizer(),
     )

     # During normal execution we don't expect `self.builder_config.num_fields`

tensorflow_datasets/datasets/multi_news/README.md
@@ -1,8 +1,13 @@
-Multi-News
-
-
-
-
-
-
-
+# Multi-News Dataset
+
+Multi-News consists of news articles and human-written summaries of these
+articles from the news site `newser.com`. Each summary is professionally written
+by editors and includes links to the original articles cited.
+
+This is the first large-scale dataset for multi-document summarization on news
+articles.
+
+Each record has two features:
+
+* `document`: Texts of news articles, separated by special token "|||||".
+* `summary`: Summary of the news.

tensorflow_datasets/datasets/multi_news/TAGS.txt
@@ -0,0 +1,6 @@
+content.data-type.text # Contains text data.
+content.subject.news # Relates to news.
+content.language.en # Contains text in language English / en.
+ml.task.abstractive-text-summarization # Relates to Abstractive Text Summarization, a machine learning task.
+ml.task.natural-language-understanding # Relates to Natural Language Understanding, a machine learning task.
+ml.task.text-summarization # Relates to Text Summarization, a machine learning task.

tensorflow_datasets/datasets/multi_news/checksums.tsv
@@ -1,6 +1,6 @@
-https://huggingface.co/datasets/alexfabbri/multi_news/
-https://huggingface.co/datasets/alexfabbri/multi_news/
-https://huggingface.co/datasets/alexfabbri/multi_news/
-https://huggingface.co/datasets/alexfabbri/multi_news/
-https://huggingface.co/datasets/alexfabbri/multi_news/
-https://huggingface.co/datasets/alexfabbri/multi_news/
+https://huggingface.co/datasets/alexfabbri/multi_news/resolve/main/data/test.src.cleaned 68999509 138d3ac2dc899cbcd2e3745aaa94d1c1db55fb7058d9df4ba3ef2dac05a3a186 test.src.cleaned
+https://huggingface.co/datasets/alexfabbri/multi_news/resolve/main/data/test.tgt 7309099 fa97cf91a62ae82a0af6da88f2ddf8e06eb4e3b90f7971d8e0c516436518fae3 test.tgt
+https://huggingface.co/datasets/alexfabbri/multi_news/resolve/main/data/train.src.cleaned 547512283 627781c8ce55d528fcdacd495db45583a915e2d24b7983b0a5a6693ede933bb1 train.src.cleaned
+https://huggingface.co/datasets/alexfabbri/multi_news/resolve/main/data/train.tgt 58793912 e9e82b8f413b0f1ed4eb7c883f93bb744f829c218c1608b6ba7615d687d07121 train.tgt
+https://huggingface.co/datasets/alexfabbri/multi_news/resolve/main/data/val.src.cleaned 66875522 f0a43902da366eea2b882e39ddd4c0975ad44aba6b61095a2ea90362e9e2bb65 val.src.cleaned
+https://huggingface.co/datasets/alexfabbri/multi_news/resolve/main/data/val.tgt 7295302 bb08a078e0cb2b8ca9cc0fe3bfbe9d4098dee706bd00eb97449155e41b880157 val.tgt

tensorflow_datasets/datasets/multi_news/multi_news_dataset_builder.py
@@ -19,8 +19,9 @@ from etils import epath
 import tensorflow_datasets.public_api as tfds

 _URL_PATH = (
-    "https://huggingface.co/datasets/alexfabbri/multi_news/
+    "https://huggingface.co/datasets/alexfabbri/multi_news/resolve/main/data/"
 )
+_LICENSE = "For non-commercial research and educational purposes only"


 _DOCUMENT = "document"

@@ -30,7 +31,12 @@ _SUMMARY = "summary"
 class Builder(tfds.core.GeneratorBasedBuilder):
   """DatasetBuilder for multi_news dataset."""

-  VERSION = tfds.core.Version("2.
+  VERSION = tfds.core.Version("2.1.0")
+  RELEASE_NOTES = {
+      "1.0.0": "Initial release.",
+      "2.0.0": "Update the dataset with valid URLs.",
+      "2.1.0": "Update the dataset with cleaned URLs.",
+  }

   def _info(self) -> tfds.core.DatasetInfo:
     """Returns the dataset metadata."""

@@ -40,6 +46,7 @@ class Builder(tfds.core.GeneratorBasedBuilder):
         ),
         supervised_keys=(_DOCUMENT, _SUMMARY),
         homepage="https://github.com/Alex-Fabbri/Multi-News",
+        license=_LICENSE,
     )

   def _split_generators(self, dl_manager: tfds.download.DownloadManager):

tensorflow_datasets/public_api.py
@@ -27,7 +27,7 @@ from tensorflow_datasets.core import deprecated
 from tensorflow_datasets.core import download
 from tensorflow_datasets.core import features
 from tensorflow_datasets.core import folder_dataset
-from tensorflow_datasets.core import transform
+from tensorflow_datasets.core import transform
 from tensorflow_datasets.core import visualization
 from tensorflow_datasets.core.as_dataframe import as_dataframe
 from tensorflow_datasets.core.dataset_utils import as_numpy

tensorflow_datasets/scripts/cli/cli_utils.py
@@ -197,7 +197,7 @@ class PathOptions:
   """

   data_dir: epath.Path = simple_parsing.field(
-      default=epath.Path(constants.
+      default=epath.Path(constants.get_default_data_dir())
   )
   download_dir: epath.Path | None = None
   extract_dir: epath.Path | None = None

tensorflow_datasets/testing/__init__.py
@@ -50,6 +50,7 @@ if typing.TYPE_CHECKING:
   from tensorflow_datasets.testing.test_utils import enable_gcs_access
   from tensorflow_datasets.testing.test_utils import fake_examples_dir
   from tensorflow_datasets.testing.test_utils import make_tmp_dir
+  from tensorflow_datasets.testing.test_utils import mock_default_data_dir
   from tensorflow_datasets.testing.test_utils import mock_kaggle_api
   from tensorflow_datasets.testing.test_utils import MockFs
   from tensorflow_datasets.testing.test_utils import rm_tmp_dir

@@ -84,6 +85,7 @@ _API = {
     # TODO(afrozm): rm from here and add as methods to TestCase
     "make_tmp_dir": "tensorflow_datasets.testing.test_utils",
     "mock_data": "tensorflow_datasets.testing.mocking",
+    "mock_default_data_dir": "tensorflow_datasets.testing.test_utils",
     "mock_kaggle_api": "tensorflow_datasets.testing.test_utils",
     "MockFs": "tensorflow_datasets.testing.test_utils",
     "MockPolicy": "tensorflow_datasets.testing.mocking",

tensorflow_datasets/testing/test_utils.py
@@ -33,6 +33,7 @@ from unittest import mock
 from etils import epath
 from etils import epy
 import numpy as np
+from tensorflow_datasets.core import constants
 from tensorflow_datasets.core import dataset_builder
 from tensorflow_datasets.core import dataset_collection_builder
 from tensorflow_datasets.core import dataset_info

@@ -884,3 +885,13 @@ def dummy_croissant_file(
   croissant_file.write_text(json.dumps(dummy_metadata.to_json(), indent=2))

   yield croissant_file
+
+
+@contextlib.contextmanager
+def mock_default_data_dir() -> Iterator[str]:
+  """Mocks the `constants.DATA_DIR`."""
+  with tempfile.TemporaryDirectory() as tempdir:
+    tmp_data_dir = os.path.join(tempdir, 'default_dir')
+    os.makedirs(tmp_data_dir)
+    constants.DATA_DIR = tmp_data_dir
+    yield tmp_data_dir

{tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tfds-nightly
-Version: 4.9.9.dev202508180045
+Version: 4.9.9.dev202508200044
 Summary: tensorflow/datasets is a library of datasets ready to use with TensorFlow.
 Home-page: https://github.com/tensorflow/datasets
 Download-URL: https://github.com/tensorflow/datasets/tags

{tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/RECORD
RENAMED
@@ -4,7 +4,7 @@ tensorflow_datasets/conftest.py,sha256=95ocbFU_SvVgWHI7NVkH1wXDrb-OGZrd_ihiVVkpk
 tensorflow_datasets/import_public_api_test.py,sha256=NPjteyVJL2ZDPEznMWBpP6hHbCaWiaZPxOcgnyr-HqY,852
 tensorflow_datasets/import_test.py,sha256=6rtuyyIwF9QNPyu6c-s5t-aA0fKPy2R9ondIHJVOkhY,801
 tensorflow_datasets/import_without_tf_test.py,sha256=znenZUnnltG7Jh2-PhhMEl_APadgiz1qPXi5P3Z85xo,3459
-tensorflow_datasets/public_api.py,sha256=
+tensorflow_datasets/public_api.py,sha256=omCZpyf4uc9jgFTEvZ35ws4S-nDqMY1Yd_IyQwG3aCs,3863
 tensorflow_datasets/setup_teardown.py,sha256=4qTNXhVE9nvp6U77HEX7_OJ49_eCTFJLmp3suWGiM6g,1450
 tensorflow_datasets/typing.py,sha256=UThdRvwKbnXQkllNBhMCVHmCaIhBLHZtxQtfl0TE8e8,1397
 tensorflow_datasets/version.py,sha256=Yid9O3W4SuoFxeA25B4U4gQnr0lhvGeQKMY3tQ_F6No,1114
@@ -59,12 +59,12 @@ tensorflow_datasets/core/as_dataframe.py,sha256=3-2ScAo2G6wwYWbz_w3Crb4QyBwcuIYh
 tensorflow_datasets/core/as_dataframe_test.py,sha256=cGgk3f9j87dDRA2EXedlYb11NpOLdew0dA_O0ZG-PLQ,2048
 tensorflow_datasets/core/beam_utils.py,sha256=0X2lE9ILqLWZe5Idg58-G5XtgITXEAxqVodDtCDo9Ro,5109
 tensorflow_datasets/core/beam_utils_test.py,sha256=5ZhntgG658uT5pF4kw_U5Epm8lu0tdg4cI-0viMItzg,2852
-tensorflow_datasets/core/constants.py,sha256=
+tensorflow_datasets/core/constants.py,sha256=nSePbV_wWc5iio_knccDTXOAWhoStvocHEBZHQwUl84,2932
 tensorflow_datasets/core/dataset_builder.py,sha256=GMPEtJ3vcELXg5IvqgYHLjTxdomMzLNZV7rW8YBJg2E,80616
 tensorflow_datasets/core/dataset_builder_beam_test.py,sha256=d7UsYNsAIY4FcANAERLcVMDcajIpAi0uMfrnQoe4yv4,8386
 tensorflow_datasets/core/dataset_builder_notfdv_test.py,sha256=eIWlOZijQfopdze85EkbcPY1I8lFmEBnedcoUoOAnRQ,1346
 tensorflow_datasets/core/dataset_builder_read_test.py,sha256=QxodggixId7zmknhnC0hAYMcuLehWUODVPo7JvjYw9k,4571
-tensorflow_datasets/core/dataset_builder_test.py,sha256=
+tensorflow_datasets/core/dataset_builder_test.py,sha256=rjijuAMBxn6bBQy7kVhgq-pWnbvMcV2UGKckVfYRfJk,48598
 tensorflow_datasets/core/dataset_collection_builder.py,sha256=9tvIWFL4gLFaDFnU_ioKaGGzPQshmACS64vagjTVzkE,7224
 tensorflow_datasets/core/dataset_collection_builder_test.py,sha256=m6H9Kxb-RHUiBEZXd6kyheoixlogJ3OwzixHM2jP6WA,3664
 tensorflow_datasets/core/dataset_info.py,sha256=jtkzrc0Jw5BnLnbR18hvPwlzzj-86Rib_9eHlAehYa8,52636
@@ -139,7 +139,7 @@ tensorflow_datasets/core/data_sources/parquet.py,sha256=0QmGVlGm4gIVOCANP0U-iDpG
 tensorflow_datasets/core/data_sources/python.py,sha256=qUCItHkomXP3OuH8VaOOCchHIoYzYsuEoJsjPiOJM0w,1452
 tensorflow_datasets/core/data_sources/python_test.py,sha256=O3yqMPx40JlHN0uFfZPNLoV6s8DdEgYeRPtqveqRiJ0,1736
 tensorflow_datasets/core/dataset_builders/__init__.py,sha256=StTA3euephqDZdpTzJQgfWNqB5inZosrAhaWg2BOeio,1945
-tensorflow_datasets/core/dataset_builders/adhoc_builder.py,sha256=
+tensorflow_datasets/core/dataset_builders/adhoc_builder.py,sha256=1a-5hVjf9t24SD9fWzDDuKoOrA-Vmydf5QxvU7ap-sI,9263
 tensorflow_datasets/core/dataset_builders/adhoc_builder_test.py,sha256=yhRwrznK78MvHeWGRggnMTiyx_SlR1z30iD5VU3Gweo,13096
 tensorflow_datasets/core/dataset_builders/croissant_builder.py,sha256=XmnbIKiEN9OnY_RC8P7-83hbUfvtuJhbm24HfNFpiQs,17088
 tensorflow_datasets/core/dataset_builders/croissant_builder_test.py,sha256=42HpBr3pANVKiok4lcx6xqwf0fY7kma6WIGA8WehNSs,15072
@@ -247,13 +247,13 @@ tensorflow_datasets/core/utils/conversion_utils.py,sha256=V8kFmJu38op7-8ufZvEn0f
 tensorflow_datasets/core/utils/conversion_utils_test.py,sha256=rP_nbzQWzmZc_GXp3Y6TirwIGJqiQbF-JtY3B1tOuN0,5346
 tensorflow_datasets/core/utils/croissant_utils.py,sha256=9C8sScaEqSRsThqpQQc48GDNR1KFmDkS8hmKIvfZCB0,5181
 tensorflow_datasets/core/utils/croissant_utils_test.py,sha256=UdkAVYDTPm1L0zmMESScurV_IMA5K3qAKmL_umeMJZI,4497
-tensorflow_datasets/core/utils/docs.py,sha256=
+tensorflow_datasets/core/utils/docs.py,sha256=nRE4d8wxYZav8AcT3dkiY0yplAJBx1hygWxkeKj_V7I,1412
 tensorflow_datasets/core/utils/dtype_utils.py,sha256=LvDe1hbgQem57RiqXjG9U5Roj8-1KkBMmSYTtgctx2U,3246
 tensorflow_datasets/core/utils/dtype_utils_test.py,sha256=-Qe2fQzDO5sjS36ZL-dY9w0tNrJXokIoSRFEQCv5dQA,3259
 tensorflow_datasets/core/utils/error_utils.py,sha256=lnquUa_VGRjn7-G_5x-PvWGgnnO6GAWsi9I7xeVuGxQ,3204
 tensorflow_datasets/core/utils/error_utils_test.py,sha256=Brt8X12ZlWCR4x3WLnSvq4X76eyU1yH3t5STPfAkxUs,2060
-tensorflow_datasets/core/utils/file_utils.py,sha256=
-tensorflow_datasets/core/utils/file_utils_test.py,sha256=
+tensorflow_datasets/core/utils/file_utils.py,sha256=cyesb6JgcY5CNVMVB_wBOup8_36VXXQYgPutz-0ttuw,18152
+tensorflow_datasets/core/utils/file_utils_test.py,sha256=Pg7XQzocQv9NCXASm1C9YqfgvYruLRCSLgMIL2Y6MbA,13418
 tensorflow_datasets/core/utils/gcs_utils.py,sha256=8mBOgEepkah1Rw36F6DNIVhLzfXbR8iS8KMLQUM5sPk,5154
 tensorflow_datasets/core/utils/gcs_utils_test.py,sha256=Ig8S37AvFG2g7kNjYxqgmqNKlLPeXt31XD7RY4UzsDg,2578
 tensorflow_datasets/core/utils/huggingface_utils.py,sha256=NeYaUoO3vIFH8M0hZ8k4w7AchFZJIGsuV1XwKJVttfw,5325
@@ -507,7 +507,7 @@ tensorflow_datasets/datasets/ble_wind_field/CITATIONS.bib,sha256=OXFyckl5Jz_2ssa
 tensorflow_datasets/datasets/ble_wind_field/README.md,sha256=dB1Gd0ocVxHb4lf1JqxioVsLZeK-wCX91LPboFGyXQo,1061
 tensorflow_datasets/datasets/ble_wind_field/TAGS.txt,sha256=i6oI1TaRyQJiq7iTga-YlR5ZTz9oHCt8dyNS4dTsapo,26
 tensorflow_datasets/datasets/ble_wind_field/__init__.py,sha256=eFqnTjU7s5iubj6XcKoU8lZUSHecOdnebZFm1vTkjbA,612
-tensorflow_datasets/datasets/ble_wind_field/ble_wind_field_dataset_builder.py,sha256=
+tensorflow_datasets/datasets/ble_wind_field/ble_wind_field_dataset_builder.py,sha256=EQ1U0GZpSl2wbXysnI7lWxwqmYRbfgw2e0Rad9xQ0pI,4764
 tensorflow_datasets/datasets/ble_wind_field/ble_wind_field_dataset_builder_test.py,sha256=j9bp0NEpMFPQmikgErlAvOa-Qu7qveFkWqF5z4TVYdg,1851
 tensorflow_datasets/datasets/ble_wind_field/checksums.tsv,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tensorflow_datasets/datasets/blimp/CITATIONS.bib,sha256=jZ3PtuS5XjQ8FQO4oVOXFwivk00o11YjMnntK5xjLU4,297
@@ -931,11 +931,11 @@ tensorflow_datasets/datasets/mlqa/checksums.tsv,sha256=_lbjCALdPkQgy4EM4tyFPDusv
 tensorflow_datasets/datasets/mlqa/mlqa_dataset_builder.py,sha256=j629tSE41KnEo5rywqyCPZteA14xD2YsPPIrjxsawks,2813
 tensorflow_datasets/datasets/mlqa/mlqa_dataset_builder_test.py,sha256=qZXHcslBHUNAn-BRynUHouA_vclALLOubCWHpoIhPdc,1074
 tensorflow_datasets/datasets/multi_news/CITATIONS.bib,sha256=oRe0wDEh7EgfsT4OJJFpphYCDBmZyRVHuR1YkihYYl0,328
-tensorflow_datasets/datasets/multi_news/README.md,sha256=
-tensorflow_datasets/datasets/multi_news/TAGS.txt,sha256=
+tensorflow_datasets/datasets/multi_news/README.md,sha256=s0XL9ddJL7oNJ9r7mSG8_Hdp95WlMWOnosfacw1kuek,469
+tensorflow_datasets/datasets/multi_news/TAGS.txt,sha256=OPDe1XqRiLYpvmXuPX2_aMaOKIXYsl562usmTEEqkwg,449
 tensorflow_datasets/datasets/multi_news/__init__.py,sha256=eFqnTjU7s5iubj6XcKoU8lZUSHecOdnebZFm1vTkjbA,612
-tensorflow_datasets/datasets/multi_news/checksums.tsv,sha256=
-tensorflow_datasets/datasets/multi_news/multi_news_dataset_builder.py,sha256
+tensorflow_datasets/datasets/multi_news/checksums.tsv,sha256=S-8k82snl0zj1rjjO5LW7svXRNnDuWRc72qpIcBu6WA,1031
+tensorflow_datasets/datasets/multi_news/multi_news_dataset_builder.py,sha256=-ZOuQ7BfPN6_DkrUddcE2qm76eUpmlo8nHizd8DsstQ,3046
 tensorflow_datasets/datasets/multi_news/multi_news_dataset_builder_test.py,sha256=5amBMQ7PKbPLeZ2kiT18tEb_Z-CMS0DasTRT6goTjXQ,1259
 tensorflow_datasets/datasets/natural_instructions/CITATIONS.bib,sha256=tcQG5eEGL_wr_5MEnZ6Q_ce2oZm6InbbRKiFqee9g7I,412
 tensorflow_datasets/datasets/natural_instructions/README.md,sha256=mceGvviI62PO5mh59sYPP_9vuuwKo0g-m7LQilP1mBI,370
@@ -1990,7 +1990,7 @@ tensorflow_datasets/scripts/cli/build.py,sha256=uBR2mPo1YO1Of83zZ6A3m5NU0GhP0nJd
 tensorflow_datasets/scripts/cli/build_test.py,sha256=K7ho7IRtAty1ZNPLj33Th_nZajYBkXRLA4u3dbElQmo,10615
 tensorflow_datasets/scripts/cli/builder_templates.py,sha256=99SvH3skigkc2Qg737BV2OzhXL_Rgu4az8eVHsxKCLk,7985
 tensorflow_datasets/scripts/cli/builder_templates_test.py,sha256=HBNB-v2zlImKULPI8Webs9hXCkeFmWT29urxav-tDe8,2062
-tensorflow_datasets/scripts/cli/cli_utils.py,sha256=
+tensorflow_datasets/scripts/cli/cli_utils.py,sha256=1MjBd1thfqgUK6iHMV9MG-1ny4ZZUuyBgcRPFnApDgY,13434
 tensorflow_datasets/scripts/cli/conftest.py,sha256=3PNh_BbR013G4HyLAZOleUXsQ9mICrD03NaKwdHFMXs,1291
 tensorflow_datasets/scripts/cli/convert_format.py,sha256=ZS7CmWJ-oZ0usO4TB8GKDj9TBJ5MyEO0I9QLRg7eQOw,3797
 tensorflow_datasets/scripts/cli/convert_format_utils.py,sha256=U_q5WVgMNrjBkOc166U4Y_eca5KOS3Xb3jSDjp4XdK4,29078
@@ -2005,8 +2005,6 @@ tensorflow_datasets/scripts/deployment/copy_dataset_info_files.py,sha256=uLuvwOW
 tensorflow_datasets/scripts/deployment/export_community_datasets.py,sha256=h6IEu7Y19cl6adiT3ve5HUbKb5hUuysKdMqIgRc8CcM,3643
 tensorflow_datasets/scripts/deployment/export_community_datasets_test.py,sha256=eIE0dR9KGkXWX2kcanfN6rKdLYSsFb5QCVXfe7nd7gI,3234
 tensorflow_datasets/scripts/documentation/__init__.py,sha256=Z8UWkv0wbzS4AzaLgSpYVGApYv5j57RWY0vN5Z553BQ,613
-tensorflow_datasets/scripts/documentation/build_api_docs.py,sha256=NXbW1ijbIUogxpWFDaSngw8_84TJuFv7C_Jr7hLxeyk,2893
-tensorflow_datasets/scripts/documentation/build_api_docs_test.py,sha256=m8_XuS_05NYaErMJeCQoqdrvKUFw-JKBi7wdDeqI1jA,2065
 tensorflow_datasets/scripts/documentation/build_catalog.py,sha256=SYJoNW-VxvL8xx85uYlFBwbr1k64HcmRBfxsj9-sdYA,8680
 tensorflow_datasets/scripts/documentation/build_catalog_test.py,sha256=qjnqK6lhBh-uNrjLQkEs3AbKFBo5uz_sxhhdT4ibOyA,2532
 tensorflow_datasets/scripts/documentation/build_community_catalog.py,sha256=gh84xnKbL_ndR4GGbgBNLJ0nxjFwiAPLuhUvzeKPZAo,19902
@@ -2127,7 +2125,7 @@ tensorflow_datasets/summarization/media_sum/__init__.py,sha256=ascERqiYc3QEf0hqG
 tensorflow_datasets/summarization/media_sum/media_sum.py,sha256=CIhR_cfQb1aEfu9BTCdsMPe6TC_okOW_cwNR76wqrFo,877
 tensorflow_datasets/summarization/summscreen/__init__.py,sha256=ADxohrpUPJjug4r2kGCCJEWZzVD4s2S0smqLfjkc8YY,718
 tensorflow_datasets/summarization/summscreen/summscreen.py,sha256=DfwGr3vsRhOC62ODJ1Sp7-v219bPjJ93KK043YReV7I,884
-tensorflow_datasets/testing/__init__.py,sha256=
+tensorflow_datasets/testing/__init__.py,sha256=I19SrK6rO0lD3w-ZnuiG1pek3t07up0SXGopfDBNsBk,6195
 tensorflow_datasets/testing/dataset_builder_testing.py,sha256=t95l1N8exM7G7qdPMHe1oOlF0E7KpptJBNivLXA3Tqo,25155
 tensorflow_datasets/testing/dataset_builder_testing_test.py,sha256=Nf7Ykg5bY5o9ZatQKrRJhr-qGTtNKle4aZph4rt72i4,1283
 tensorflow_datasets/testing/dataset_collection_builder_testing.py,sha256=tUv2l53rc9GEo4sWvM9OP9r-Ze54dcDakeLQBMS7yos,4825
@@ -2138,7 +2136,7 @@ tensorflow_datasets/testing/mocking.py,sha256=4mIq0ngxfs3w0hFlosGOSTp-mAQVfBfoFw
 tensorflow_datasets/testing/mocking_test.py,sha256=9DMkxcQw_dZTKULNHiKv91e0VcBsUTa6FIhUOLvJKls,13796
 tensorflow_datasets/testing/test_case.py,sha256=_H_M3pp6Vp3dbtPyVy5Um7X8S4V4EKPLrao1mbS2IdU,2554
 tensorflow_datasets/testing/test_case_in_context.py,sha256=7YrdTI_rqR01Q-ToVqewIm1OKDwvxIidPhaffYmjP1E,1872
-tensorflow_datasets/testing/test_utils.py,sha256=
+tensorflow_datasets/testing/test_utils.py,sha256=TguQfyqwVkOAPjI5DLwKvFDnqDwvmI2Mg9MTS0RyFGg,27094
 tensorflow_datasets/testing/test_utils_test.py,sha256=nL2niozCO5Gh4cWPWbDW5_w3w-mHRYZEQmmfej2fpjY,9576
 tensorflow_datasets/testing/version_test.py,sha256=fNMSX1FSNs_66MHcRGAWzoPZWJ-sAvmc-rceKXGK-uM,2791
 tensorflow_datasets/text/__init__.py,sha256=_PtJTw2LQqgxFNVeBCEXrLGF2qg5NNOiXTW9oKZR_ZA,5319
@@ -2474,10 +2472,10 @@ tensorflow_datasets/vision_language/wit/wit_test.py,sha256=PXS8DMNW-MDrT2p5oy4Ic
 tensorflow_datasets/vision_language/wit_kaggle/__init__.py,sha256=vGwSGeM8WE4Q-l0-eEE1sBojmk6YT0l1OO60AWa4Q40,719
 tensorflow_datasets/vision_language/wit_kaggle/wit_kaggle.py,sha256=q-vX_FBzIwsFxL4sY9vuyQ3UQD2PLM4yhUR4U6l-qao,16903
 tensorflow_datasets/vision_language/wit_kaggle/wit_kaggle_test.py,sha256=ZymHT1NkmD-pUnh3BmM3_g30c5afsWYnmqDD9dVyDSA,1778
-tfds_nightly-4.9.9.
-tfds_nightly-4.9.9.
-tfds_nightly-4.9.9.
-tfds_nightly-4.9.9.
-tfds_nightly-4.9.9.
-tfds_nightly-4.9.9.
-tfds_nightly-4.9.9.
+tfds_nightly-4.9.9.dev202508200044.dist-info/licenses/AUTHORS,sha256=nvBG4WwfgjuOu1oZkuQKw9kg7X6rve679ObS-YDDmXg,309
+tfds_nightly-4.9.9.dev202508200044.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+tfds_nightly-4.9.9.dev202508200044.dist-info/METADATA,sha256=2wzC_C5TtacBcMmQdB8Qpf7CZSR7VUGMAzb3jtKy0Jo,11291
+tfds_nightly-4.9.9.dev202508200044.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+tfds_nightly-4.9.9.dev202508200044.dist-info/entry_points.txt,sha256=eHEL7nF5y1uCY2FgkuYIdE062epJXlAQTSdq89px4p4,73
+tfds_nightly-4.9.9.dev202508200044.dist-info/top_level.txt,sha256=bAevmk9209s_oxVZVlN6hSDIVS423qrMQvmcWSvW4do,20
+tfds_nightly-4.9.9.dev202508200044.dist-info/RECORD,,

tensorflow_datasets/scripts/documentation/build_api_docs.py
DELETED
@@ -1,91 +0,0 @@
-# coding=utf-8
-# Copyright 2025 The TensorFlow Datasets Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""generates api_docs for tensorflow_datasets."""
-import os
-
-from absl import app
-from absl import flags
-from absl import logging
-
-import tensorflow_datasets as tfds
-# Testing is lazily imported, so we first force its import.
-from tensorflow_datasets.testing import *  # pylint: disable=wildcard-import
-from tensorflow_docs.api_generator import generate_lib
-
-from tensorflow.tools.docs import doc_controls  # pylint: disable=g-direct-tensorflow-import
-
-# Force the definition of all documentation decorators declared in
-# third_party/py/tensorflow_datasets/core/utils/docs.py to TensorFlow decorators
-try:
-  tfds.core.utils.docs.deprecated = doc_controls.set_deprecated
-  tfds.core.utils.docs.doc_private = doc_controls.doc_private
-  tfds.core.utils.docs.do_not_doc = doc_controls.do_not_generate_docs
-  # Same as `do_not_doc`, but also applied to children
-  tfds.core.utils.docs.do_not_doc_inheritable = (
-      doc_controls.do_not_doc_inheritable
-  )
-  # Document the parent, but not the children
-  tfds.core.utils.docs.do_not_doc_in_subclasses = (
-      doc_controls.do_not_doc_in_subclasses
-  )
-except AttributeError:
-  logging.info("Could not set TensorFlow documentation decorators.")
-
-FLAGS = flags.FLAGS
-
-flags.DEFINE_string(
-    "output_dir", "/tmp/datasets_api", "Where to output the docs"
-)
-flags.DEFINE_string(
-    "code_url_prefix",
-    "https://github.com/tensorflow/datasets/tree/master/tensorflow_datasets/",
-    "The url prefix for links to code.",
-)
-
-flags.DEFINE_bool(
-    "search_hints", True, "Include metadata search hints in the generated files"
-)
-
-flags.DEFINE_string(
-    "site_path", "datasets/api_docs/python", "Path prefix in the _toc.yaml"
-)
-
-
-def execute(output_dir, code_url_prefix, search_hints, site_path):
-  """Builds API docs for tensorflow_datasets."""
-  doc_generator = generate_lib.DocGenerator(
-      root_title="TensorFlow Datasets",
-      py_modules=[("tfds", tfds)],
-      base_dir=os.path.dirname(tfds.__file__),
-      search_hints=search_hints,
-      code_url_prefix=code_url_prefix,
-      site_path=site_path,
-  )
-
-  doc_generator.build(output_dir)
-
-
-def main(unused_argv):
-  execute(
-      FLAGS.output_dir,
-      FLAGS.code_url_prefix,
-      FLAGS.search_hints,
-      FLAGS.site_path,
-  )
-
-
-if __name__ == "__main__":
-  app.run(main)

tensorflow_datasets/scripts/documentation/build_api_docs_test.py
DELETED
@@ -1,65 +0,0 @@
-# coding=utf-8
-# Copyright 2025 The TensorFlow Datasets Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Smoke Test for docs generation."""
-
-import os
-import shutil
-import tempfile
-
-from absl.testing import absltest
-
-from tensorflow_datasets.core.utils import docs
-from tensorflow_datasets.scripts.documentation import build_api_docs
-
-
-class BuildDocsTest(absltest.TestCase):
-
-  def setUp(self):
-    super(BuildDocsTest, self).setUp()
-    self.workdir = tempfile.mkdtemp()
-    if os.path.exists(self.workdir):
-      shutil.rmtree(self.workdir)
-    os.makedirs(self.workdir)
-
-  def test_api_gen(self):
-    build_api_docs.execute(
-        output_dir=self.workdir,
-        code_url_prefix="",
-        search_hints=True,
-        site_path="datasets/api_docs/python",
-    )
-
-    # Check that the "defined in" section is working
-    with open(os.path.join(self.workdir, "tfds.md")) as f:
-      content = f.read()
-    self.assertIn("__init__.py", content)
-
-    # Check that the `testing` folder is generated.
-    with open(os.path.join(self.workdir, "tfds/testing.md")) as f:
-      content = f.read()
-    self.assertIn("__init__.py", content)
-
-  def test_tensorflow_decorators_are_used_and_not_dummy_decorator(self):
-    self.assertEqual(docs.deprecated.__name__, "set_deprecated")
-    self.assertEqual(docs.doc_private.__name__, "doc_private")
-    self.assertEqual(docs.do_not_doc.__name__, "do_not_generate_docs")
-    self.assertEqual(
-        docs.do_not_doc_inheritable.__name__, "do_not_doc_inheritable"
-    )
-
-
-if __name__ == "__main__":
-  absltest.main()

{tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/WHEEL
RENAMED
File without changes

{tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/entry_points.txt
RENAMED
File without changes

{tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/licenses/AUTHORS
RENAMED
File without changes

{tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/licenses/LICENSE
RENAMED
File without changes

{tfds_nightly-4.9.9.dev202508180045.dist-info → tfds_nightly-4.9.9.dev202508200044.dist-info}/top_level.txt
RENAMED
File without changes