tfds-nightly 4.9.9.dev202508120044__py3-none-any.whl → 4.9.9.dev202508140045__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tensorflow_datasets/scripts/cli/build.py +2 -14
- tensorflow_datasets/scripts/cli/build_test.py +2 -1
- tensorflow_datasets/scripts/cli/cli_utils.py +32 -1
- tensorflow_datasets/scripts/cli/convert_format.py +3 -15
- tensorflow_datasets/scripts/cli/croissant.py +6 -27
- tensorflow_datasets/scripts/cli/main.py +54 -31
- tensorflow_datasets/scripts/cli/new.py +2 -15
- tensorflow_datasets/scripts/download_and_prepare.py +4 -4
- tensorflow_datasets/testing/dataset_builder_testing.py +16 -16
- {tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/METADATA +1 -1
- {tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/RECORD +16 -16
- {tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/WHEEL +0 -0
- {tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/entry_points.txt +0 -0
- {tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/licenses/AUTHORS +0 -0
- {tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/licenses/LICENSE +0 -0
- {tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/top_level.txt +0 -0
tensorflow_datasets/scripts/cli/build.py
CHANGED
@@ -15,7 +15,6 @@

 """`tfds build` command."""

-import argparse
 from collections.abc import Iterator
 import dataclasses
 import functools
@@ -24,7 +23,6 @@ import itertools
 import json
 import multiprocessing
 import os
-import typing
 from typing import Any, Type

 from absl import logging
@@ -34,8 +32,8 @@ from tensorflow_datasets.scripts.cli import cli_utils


 @dataclasses.dataclass(frozen=True, kw_only=True)
-class Args:
-  """
+class Args(cli_utils.Args):
+  """Commands for downloading and preparing datasets.

   Attributes:
     positional_datasets: Name(s) of the dataset(s) to build. Default to current
@@ -120,16 +118,6 @@ class Args:
       pool.map(process_builder_fn, builders)


-def register_subparser(parsers: argparse._SubParsersAction) -> None:  # pylint: disable=protected-access
-  """Add subparser for `build` command."""
-  parser = parsers.add_parser(
-      'build', help='Commands for downloading and preparing datasets.'
-  )
-  parser = typing.cast(simple_parsing.ArgumentParser, parser)
-  parser.add_arguments(Args, dest='args')
-  parser.set_defaults(subparser_fn=lambda args: args.args.execute())
-
-
 def _make_builders(
     args: Args,
     builder_cls: Type[tfds.core.DatasetBuilder],
tensorflow_datasets/scripts/cli/build_test.py
CHANGED
@@ -19,6 +19,7 @@ import dataclasses
 import functools
 import multiprocessing
 import os
+import typing
 from unittest import mock

 from etils import epath
@@ -311,7 +312,7 @@ def test_download_only(build):
 )
 def test_make_download_config(args: str, download_config_kwargs):
   args = main._parse_flags(f'tfds build x {args}'.split())
-  cmd_args
+  cmd_args = typing.cast(build_lib.Args, args.command)
   actual = build_lib._make_download_config(cmd_args, dataset_name='x')
   # Ignore the beam runner
   actual = actual.replace(beam_runner=None)
tensorflow_datasets/scripts/cli/cli_utils.py
CHANGED
@@ -15,11 +15,13 @@

 """Utility functions for TFDS CLI."""

+import abc
 import argparse
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 import dataclasses
 import itertools
 import pathlib
+from typing import TypeVar

 from absl import logging
 from absl.flags import argparse_flags
@@ -33,6 +35,8 @@ from tensorflow_datasets.core import naming
 from tensorflow_datasets.core.utils import file_utils
 from tensorflow_datasets.scripts.utils import flag_utils

+_DataclassT = TypeVar('_DataclassT')
+

 class ArgumentParser(
     argparse_flags.ArgumentParser, simple_parsing.ArgumentParser
@@ -77,6 +81,33 @@ class ArgumentParser(
     return super().parse_known_args(args, namespace)


+def make_flags_parser(
+    args_dataclass: type[_DataclassT], description: str
+) -> Callable[[list[str]], _DataclassT]:
+  """Returns a function that parses flags and returns the dataclass instance."""
+
+  def _parse_flags(argv: list[str]) -> _DataclassT:
+    """Command lines flag parsing."""
+    parser = ArgumentParser(
+        description=description,
+        allow_abbrev=False,
+    )
+    parser.add_arguments(args_dataclass, dest='args')
+    return parser.parse_args(argv[1:]).args
+
+  return _parse_flags
+
+
+@dataclasses.dataclass(frozen=True, kw_only=True)
+class Args(abc.ABC):
+  """CLI arguments for TFDS CLI commands."""
+
+  @abc.abstractmethod
+  def execute(self) -> None:
+    """Execute the CLI command."""
+    ...
+
+
 @dataclasses.dataclass
 class DatasetInfo:
   """Structure for common string used for formatting.
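
With the new `make_flags_parser` helper and the abstract `Args` base class, each CLI command can be expressed as a frozen dataclass that knows how to `execute()` itself. A minimal sketch of how a command could reuse these helpers; the `GreetArgs` command and its field are hypothetical, and only `cli_utils.Args` and `cli_utils.make_flags_parser` come from the diff above:

import dataclasses

from tensorflow_datasets.scripts.cli import cli_utils


@dataclasses.dataclass(frozen=True, kw_only=True)
class GreetArgs(cli_utils.Args):
  """Prints a greeting (hypothetical example command)."""

  name: str = 'world'
  """Name to greet."""

  def execute(self) -> None:
    # Implements the abstract method declared on cli_utils.Args.
    print(f'Hello, {self.name}!')


# make_flags_parser returns a callable that maps argv -> GreetArgs.
parse_flags = cli_utils.make_flags_parser(GreetArgs, description='Greets someone')
args = parse_flags(['greet', '--name', 'TFDS'])  # argv[0] is skipped, like sys.argv[0]
args.execute()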
tensorflow_datasets/scripts/cli/convert_format.py
CHANGED
@@ -25,19 +25,18 @@ tfds convert_format \
 ```
 """

-import argparse
 import dataclasses
-import typing

 from etils import epath
 import simple_parsing
 from tensorflow_datasets.core import file_adapters
+from tensorflow_datasets.scripts.cli import cli_utils
 from tensorflow_datasets.scripts.cli import convert_format_utils


 @dataclasses.dataclass(frozen=True, kw_only=True)
-class Args:
-  """
+class Args(cli_utils.Args):
+  """Converts a dataset from one file format to another format.

   Attributes:
     root_data_dir: Root data dir that contains all datasets. All datasets and
@@ -94,14 +93,3 @@ class Args:
         num_workers=self.num_workers,
         fail_on_error=not self.only_log_errors,
     )
-
-
-def register_subparser(parsers: argparse._SubParsersAction) -> None:
-  """Add subparser for `convert_format` command."""
-  parser = parsers.add_parser(
-      'convert_format',
-      help='Converts a dataset from one file format to another format.',
-  )
-  parser = typing.cast(simple_parsing.ArgumentParser, parser)
-  parser.add_arguments(Args, dest='args')
-  parser.set_defaults(subparser_fn=lambda args: args.args.execute())
tensorflow_datasets/scripts/cli/croissant.py
CHANGED
@@ -26,11 +26,9 @@ tfds build_croissant \
 ```
 """

-import argparse
 import dataclasses
 import functools
 import json
-import typing

 from etils import epath
 import mlcroissant as mlc
@@ -43,8 +41,8 @@ from tensorflow_datasets.scripts.cli import cli_utils


 @dataclasses.dataclass(frozen=True, kw_only=True)
-class CmdArgs(simple_parsing.helpers.FrozenSerializable):
-  """
+class CmdArgs(simple_parsing.helpers.FrozenSerializable, cli_utils.Args):
+  """Prepares a Croissant dataset.

   Attributes:
     jsonld: Path to the JSONLD file.
@@ -122,18 +120,10 @@ class CmdArgs(simple_parsing.helpers.FrozenSerializable):
         self.overwrite_version or self.dataset.metadata.version or '1.0.0'
     )

-
-def register_subparser(parsers: argparse._SubParsersAction) -> None:
-  """Add subparser for `build_croissant` command."""
-  parser = parsers.add_parser(
-      'build_croissant',
-      help='Prepares a croissant dataset',
-  )
-  parser = typing.cast(simple_parsing.ArgumentParser, parser)
-  parser.add_arguments(CmdArgs, dest='args')
-  parser.set_defaults(
-      subparser_fn=lambda args: prepare_croissant_builders(args.args)
-  )
+  def execute(self) -> None:
+    """Creates Croissant Builders and prepares them."""
+    for record_set_id in self.record_set_ids:
+      prepare_croissant_builder(args=self, record_set_id=record_set_id)


 def prepare_croissant_builder(
@@ -163,14 +153,3 @@ def prepare_croissant_builder(
       beam_pipeline_options=None,
   )
   return builder
-
-
-def prepare_croissant_builders(args: CmdArgs):
-  """Creates Croissant Builders and prepares them.
-
-  Args:
-    args: CLI arguments.
-  """
-  # Generate each config sequentially.
-  for record_set_id in args.record_set_ids:
-    prepare_croissant_builder(args=args, record_set_id=record_set_id)
tensorflow_datasets/scripts/cli/main.py
CHANGED
@@ -21,13 +21,13 @@ TFDS CLI to help creates and build datasets (e.g. `tfds new my_dataset`,
 See: https://www.tensorflow.org/datasets/cli
 """

-import argparse
+import dataclasses
 import logging as python_logging
-from typing import List

 from absl import app
 from absl import flags
 from absl import logging
+import simple_parsing

 import tensorflow_datasets.public_api as tfds

@@ -41,33 +41,60 @@ from tensorflow_datasets.scripts.cli import new
 FLAGS = flags.FLAGS


-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+@dataclasses.dataclass(frozen=True, kw_only=True)
+class _DummyCommand:
+  """Dummy command to avoid `command is MISSING` error."""
+
+  pass
+
+
+version_field = simple_parsing.field(
+    action='version',
+    version='TensorFlow Datasets: ' + tfds.__version__,
+    help='The version of the TensorFlow Datasets package.',
+)
+
+
+@dataclasses.dataclass(frozen=True, kw_only=True)
+class Args(cli_utils.Args):
+  """Tensorflow Datasets CLI tool."""
+
+  version: str = version_field
+  """The version of the TensorFlow Datasets package."""
+
+  dry_run: bool = simple_parsing.flag(default=False)
+  """If True, print the parsed arguments and exit."""
+
+  command: build.Args | new.Args | convert_format.Args | croissant.CmdArgs = (
+      simple_parsing.subparsers(
+          {
+              'build': build.Args,
+              'new': new.Args,
+              'convert_format': convert_format.Args,
+              'build_croissant': croissant.CmdArgs,
+          },
+          default_factory=_DummyCommand,
+      )
   )
-
-
-
-
-
-
-
-
+  """The command to execute."""
+
+  def execute(self) -> None:
+    """Run the command."""
+    if self.dry_run:
+      print(self)
+    # When no command is given, print the help message.
+    elif isinstance(self.command, _DummyCommand):
+      _parse_flags(['', '--help'])
+    else:
+      self.command.execute()
+
+
+_parse_flags = cli_utils.make_flags_parser(
+    Args, description='Tensorflow Datasets CLI tool'
+)


-def main(args: argparse.Namespace) -> None:
+def main(args: Args) -> None:

   # From the CLI, all datasets are visible
   tfds.core.visibility.set_availables([
@@ -98,11 +125,7 @@ def main(args: argparse.Namespace) -> None:
     new_stream = tfds.core.utils.tqdm_utils.TqdmStream()
     python_handler.setStream(new_stream)

-
-    print(args)
-  else:
-    # Launch the subcommand defined in the subparser (or default to print help)
-    args.subparser_fn(args)
+  args.execute()


 def launch_cli() -> None:
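
In short, main.py now builds one top-level `Args` dataclass whose `command` field is populated by `simple_parsing.subparsers`, and dispatch collapses to `args.execute()`. A hedged sketch of how a caller reaches the per-command dataclass, mirroring the updated build_test.py above (the dataset name `mnist` is illustrative; `_parse_flags` is module-private):

import typing

from tensorflow_datasets.scripts.cli import build as build_lib
from tensorflow_datasets.scripts.cli import main as main_cli

# Parse a full command line; the chosen subcommand dataclass lands on `.command`.
args = main_cli._parse_flags('tfds build mnist'.split())  # pylint: disable=protected-access
cmd_args = typing.cast(build_lib.Args, args.command)

# main() ends with args.execute(), which forwards to cmd_args.execute()
# for any real (non-dummy) command; calling it here would start the build.
print(type(cmd_args).__name__)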
tensorflow_datasets/scripts/cli/new.py
CHANGED
@@ -15,13 +15,11 @@

 """`tfds new` command."""

-import argparse
 import dataclasses
 import os
 import pathlib
 import subprocess
 import textwrap
-import typing

 import simple_parsing
 from tensorflow_datasets.core import constants
@@ -33,8 +31,8 @@ from tensorflow_datasets.scripts.cli import cli_utils as utils


 @dataclasses.dataclass(frozen=True, kw_only=True)
-class Args:
-  """
+class Args(utils.Args):
+  """Creates a new dataset directory from the template.

   Attributes:
     dataset_name: Name of the dataset to be created (in snake_case).
@@ -71,17 +69,6 @@ class Args:
     )


-def register_subparser(parsers: argparse._SubParsersAction) -> None:
-  """Add subparser for `new` command."""
-  parser = parsers.add_parser(
-      'new',
-      help='Creates a new dataset directory from the template.',
-  )
-  parser = typing.cast(simple_parsing.ArgumentParser, parser)
-  parser.add_arguments(Args, dest='args')
-  parser.set_defaults(subparser_fn=lambda args: args.args.execute())
-
-
 def create_dataset_files(
     dataset_name: str,
     dataset_dir: pathlib.Path,
tensorflow_datasets/scripts/download_and_prepare.py
CHANGED
@@ -15,7 +15,7 @@

 r"""Wrapper around `tfds build`."""

-import argparse
+import typing

 from absl import app
 from absl import flags
@@ -32,7 +32,7 @@ builder_config_id = flags.DEFINE_integer(
 )


-def _parse_flags(argv: list[str]) -> argparse.Namespace:
+def _parse_flags(argv: list[str]) -> main_cli.Args:
   """Command lines flag parsing."""
   return main_cli._parse_flags([argv[0], 'build'] + argv[1:])  # pylint: disable=protected-access

@@ -40,12 +40,12 @@ def _parse_flags(argv: list[str]) -> argparse.Namespace:
 _display_warning = True


-def main(args: argparse.Namespace) -> None:
+def main(args: main_cli.Args) -> None:
   if _display_warning:
     logging.warning(
         '***`tfds build` should be used instead of `download_and_prepare`.***'
     )
-  cmd_args
+  cmd_args = typing.cast(build.Args, args.command)
   if module_import.value:
     cmd_args.generation.imports = module_import.value
   if dataset.value:
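
The legacy wrapper keeps its absl flags (e.g. `--dataset`, `--module_import`) but now re-enters the new dataclass CLI by splicing the `build` subcommand into argv. A hedged sketch of that forwarding (invocation is illustrative only):

from tensorflow_datasets.scripts import download_and_prepare as dp

# dp._parse_flags rewrites argv so that main_cli._parse_flags sees
# `tfds build ...`; the result is the main.Args dataclass, not an
# argparse.Namespace as before.
args = dp._parse_flags(['download_and_prepare'])  # pylint: disable=protected-access
print(type(args).__name__)  # the top-level Args from main.py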
tensorflow_datasets/testing/dataset_builder_testing.py
CHANGED
@@ -105,15 +105,19 @@ class DatasetBuilderTestCase(
      BUILDER_CONFIGS from the class will be tested.
   * DL_EXTRACT_RESULT: `dict[str, str]`, the returned result of mocked
     `download_and_extract` method. The values should be the path of files
-    present in the `fake_examples` directory, relative to
-
+    present in the `fake_examples` (or `dummy_data`) directory, relative to
+    that directory.
+    If not specified, path to `fake_examples` (or `dummy_data`) will always be
+    returned.
   * DL_EXTRACT_ONLY_RESULT: `dict[str, str]`, the returned result of mocked
     `extract` method. The values should be the path of files present in the
-    `fake_examples` directory, relative to that directory.
+    `fake_examples` (or `dummy_data`) directory, relative to that directory.
+    If not specified:
     will call DownloadManager `extract` method.
   * DL_DOWNLOAD_RESULT: `dict[str, str]`, the returned result of mocked
     `download_and_extract` method. The values should be the path of files
-    present in the `fake_examples` directory, relative to
+    present in the `fake_examples` (or `dummy_data`) directory, relative to
+    that directory.
     If not specified: will use DL_EXTRACT_RESULT (this is due to backwards
     compatibility and will be removed in the future).
   * EXAMPLE_DIR: `str`, the base directory in in which fake examples are
@@ -167,11 +171,9 @@ class DatasetBuilderTestCase(
           "Assign your DatasetBuilder class to %s.DATASET_CLASS." % name
       )

-    cls._available_cm = visibility.set_availables_tmp(
-        [
-            visibility.DatasetType.TFDS_PUBLIC,
-        ]
-    )
+    cls._available_cm = visibility.set_availables_tmp([
+        visibility.DatasetType.TFDS_PUBLIC,
+    ])
     cls._available_cm.__enter__()  # pylint: disable=protected-access

   @classmethod
@@ -398,9 +400,9 @@ class DatasetBuilderTestCase(
     err_msg = (
         "Did you forget to record checksums with `--register_checksums` ? See"
         " instructions at:"
-        " https://www.tensorflow.org/datasets/add_dataset#
-        " want to opt-out of checksums validation, please add
-        " True` to the `DatasetBuilderTestCase`.\n"
+        " https://www.tensorflow.org/datasets/add_dataset#run_the_generation_code"
+        " If you want to opt-out of checksums validation, please add "
+        " `SKIP_CHECKSUMS = True` to the `DatasetBuilderTestCase`.\n"
     )
     url_infos = self.dataset_class.url_infos
     filepath = self.dataset_class._checksums_path  # pylint: disable=protected-access
@@ -574,15 +576,13 @@ class DatasetBuilderTestCase(

     # If configs specified, ensure they are all valid
     if builder.builder_config and builder.builder_config.description:
-      err_msg = textwrap.dedent(
-          """\
+      err_msg = textwrap.dedent("""\
       The BuilderConfig description should be a one-line description of
       the config.
       It shouldn't be the same as `builder.info.description` to avoid
       redundancy. Both `config.description` and `builder.info.description`
       will be displayed in the catalog.
-      """
-      )
+      """)
       ratio = difflib.SequenceMatcher(
           None,
          builder.builder_config.description,
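
The docstring and error-message updates above spell out two knobs of `DatasetBuilderTestCase`: mocked download results resolve against the dataset's `fake_examples` (or `dummy_data`) directory, and `SKIP_CHECKSUMS = True` opts a test out of checksum validation. A hedged sketch of a dataset test using those attributes; `my_dataset` and the file names are hypothetical:

from tensorflow_datasets import testing

import my_dataset  # hypothetical module defining the MyDataset builder


class MyDatasetTest(testing.DatasetBuilderTestCase):
  DATASET_CLASS = my_dataset.MyDataset
  SKIP_CHECKSUMS = True  # opt out of checksum validation, per the message above
  # Paths are relative to the dataset's fake_examples/ (or dummy_data/) directory.
  DL_EXTRACT_RESULT = {'train': 'train.csv', 'test': 'test.csv'}


if __name__ == '__main__':
  testing.test_main()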
{tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tfds-nightly
-Version: 4.9.9.dev202508120044
+Version: 4.9.9.dev202508140045
 Summary: tensorflow/datasets is a library of datasets ready to use with TensorFlow.
 Home-page: https://github.com/tensorflow/datasets
 Download-URL: https://github.com/tensorflow/datasets/tags
{tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/RECORD
RENAMED
@@ -1965,7 +1965,7 @@ tensorflow_datasets/robotics/rtx/__init__.py,sha256=T5AMbjr-iztrX4Q7k4QhiMNXLOAK
 tensorflow_datasets/robotics/rtx/rtx.py,sha256=8OEnc0_LNsgEJjaySoMwWDjzgiv4hzeobuploMM1cdo,50084
 tensorflow_datasets/scripts/__init__.py,sha256=Z8UWkv0wbzS4AzaLgSpYVGApYv5j57RWY0vN5Z553BQ,613
 tensorflow_datasets/scripts/convert_format.py,sha256=Kopn3YbNqH-euJaWFsd1nyo56-HDHgq8fDzRViXdx9A,3604
-tensorflow_datasets/scripts/download_and_prepare.py,sha256=
+tensorflow_datasets/scripts/download_and_prepare.py,sha256=LzbjSnFeo53r1D5oaRgTucHtJiabhBBYodmZsKBpt9s,1875
 tensorflow_datasets/scripts/freeze_dataset_versions.py,sha256=SKC7raxmREqaD5pUnSuy_NHdu9gxTlRxJIOoPoT3cuw,1244
 tensorflow_datasets/scripts/print_num_configs.py,sha256=an80znBHmkycQS4ZEHFQTi1fuFop56tDUx9hgguVcvw,971
 tensorflow_datasets/scripts/replace_fake_images.py,sha256=9L2m3zY0nntaOmsVlNWy6BRJEEytyrMuu5W0LXzLCpA,5223
@@ -1979,19 +1979,19 @@ tensorflow_datasets/scripts/cleanup/refactor_dataset_as_folder.py,sha256=VpEc2Us
 tensorflow_datasets/scripts/cleanup/url_filename_recorder.py,sha256=iLcsT8UgbyNUw00N7bVBC0zCqEuIQ2ndeCCcb4B-OEc,4490
 tensorflow_datasets/scripts/cleanup/url_status_checker.py,sha256=Tr3LtLnGhI8ElDAS-ejmuAU3rs1lmqmYlU4figoVQg0,1967
 tensorflow_datasets/scripts/cli/__init__.py,sha256=Z8UWkv0wbzS4AzaLgSpYVGApYv5j57RWY0vN5Z553BQ,613
-tensorflow_datasets/scripts/cli/build.py,sha256=
-tensorflow_datasets/scripts/cli/build_test.py,sha256=
+tensorflow_datasets/scripts/cli/build.py,sha256=_YetKh9ZZJfo3w6brP5sdzsdCKfVM4HnQLUyX4mbrX4,15002
+tensorflow_datasets/scripts/cli/build_test.py,sha256=K7ho7IRtAty1ZNPLj33Th_nZajYBkXRLA4u3dbElQmo,10615
 tensorflow_datasets/scripts/cli/builder_templates.py,sha256=99SvH3skigkc2Qg737BV2OzhXL_Rgu4az8eVHsxKCLk,7985
 tensorflow_datasets/scripts/cli/builder_templates_test.py,sha256=HBNB-v2zlImKULPI8Webs9hXCkeFmWT29urxav-tDe8,2062
-tensorflow_datasets/scripts/cli/cli_utils.py,sha256=
+tensorflow_datasets/scripts/cli/cli_utils.py,sha256=sARBmqVP9W6FgTNTPcCN8rUpRqoOAd4WdMksBRnu1Tg,13307
 tensorflow_datasets/scripts/cli/conftest.py,sha256=3PNh_BbR013G4HyLAZOleUXsQ9mICrD03NaKwdHFMXs,1291
-tensorflow_datasets/scripts/cli/convert_format.py,sha256=
+tensorflow_datasets/scripts/cli/convert_format.py,sha256=ZS7CmWJ-oZ0usO4TB8GKDj9TBJ5MyEO0I9QLRg7eQOw,3797
 tensorflow_datasets/scripts/cli/convert_format_utils.py,sha256=U_q5WVgMNrjBkOc166U4Y_eca5KOS3Xb3jSDjp4XdK4,29078
 tensorflow_datasets/scripts/cli/convert_format_utils_test.py,sha256=9JGNu9TvUWzbuhe6DWwnO3V9Lia5S1Is64re-pceAWE,8823
-tensorflow_datasets/scripts/cli/croissant.py,sha256=
-tensorflow_datasets/scripts/cli/main.py,sha256=
+tensorflow_datasets/scripts/cli/croissant.py,sha256=0JFcSCc4nuk-jVnG_dFQkvTWiKuNZDx-OUTC4gjqRwA,5568
+tensorflow_datasets/scripts/cli/main.py,sha256=T4MRQGfNm-FLrp8aZoujQcHY6ctkmX2B6qkErFQUVpA,4238
 tensorflow_datasets/scripts/cli/main_test.py,sha256=3zNaS_2FmxxLoZOX05iJ2riuP4Qv8cx6bhAI56tV8YI,1067
-tensorflow_datasets/scripts/cli/new.py,sha256=
+tensorflow_datasets/scripts/cli/new.py,sha256=fJok7iV0zauRKwV9n3FLVG57qfiVHYUXVBtqjEApNBY,7386
 tensorflow_datasets/scripts/cli/new_test.py,sha256=USr9So-FPtg8UzaQPPacXn0E1ukDIoew9oYkOn45oik,2655
 tensorflow_datasets/scripts/deployment/__init__.py,sha256=Z8UWkv0wbzS4AzaLgSpYVGApYv5j57RWY0vN5Z553BQ,613
 tensorflow_datasets/scripts/deployment/copy_dataset_info_files.py,sha256=uLuvwOWqvo1SOLAcxAOHIWBvfbyZQJ7nF79v8lTalKQ,2690
@@ -2122,7 +2122,7 @@ tensorflow_datasets/summarization/media_sum/media_sum.py,sha256=CIhR_cfQb1aEfu9B
 tensorflow_datasets/summarization/summscreen/__init__.py,sha256=ADxohrpUPJjug4r2kGCCJEWZzVD4s2S0smqLfjkc8YY,718
 tensorflow_datasets/summarization/summscreen/summscreen.py,sha256=DfwGr3vsRhOC62ODJ1Sp7-v219bPjJ93KK043YReV7I,884
 tensorflow_datasets/testing/__init__.py,sha256=aSwY_kciK-EZXp1D_JRkuuCJwtbFljGZ72c9YNB6yfE,6049
-tensorflow_datasets/testing/dataset_builder_testing.py,sha256=
+tensorflow_datasets/testing/dataset_builder_testing.py,sha256=t95l1N8exM7G7qdPMHe1oOlF0E7KpptJBNivLXA3Tqo,25155
 tensorflow_datasets/testing/dataset_builder_testing_test.py,sha256=Nf7Ykg5bY5o9ZatQKrRJhr-qGTtNKle4aZph4rt72i4,1283
 tensorflow_datasets/testing/dataset_collection_builder_testing.py,sha256=tUv2l53rc9GEo4sWvM9OP9r-Ze54dcDakeLQBMS7yos,4825
 tensorflow_datasets/testing/dataset_collection_builder_testing_test.py,sha256=Dw5tACaDjVt9CZi0V84tMAh2JJexrRwWF1N3DID1Mbs,1155
@@ -2468,10 +2468,10 @@ tensorflow_datasets/vision_language/wit/wit_test.py,sha256=PXS8DMNW-MDrT2p5oy4Ic
 tensorflow_datasets/vision_language/wit_kaggle/__init__.py,sha256=vGwSGeM8WE4Q-l0-eEE1sBojmk6YT0l1OO60AWa4Q40,719
 tensorflow_datasets/vision_language/wit_kaggle/wit_kaggle.py,sha256=q-vX_FBzIwsFxL4sY9vuyQ3UQD2PLM4yhUR4U6l-qao,16903
 tensorflow_datasets/vision_language/wit_kaggle/wit_kaggle_test.py,sha256=ZymHT1NkmD-pUnh3BmM3_g30c5afsWYnmqDD9dVyDSA,1778
-tfds_nightly-4.9.9.
-tfds_nightly-4.9.9.
-tfds_nightly-4.9.9.
-tfds_nightly-4.9.9.
-tfds_nightly-4.9.9.
-tfds_nightly-4.9.9.
-tfds_nightly-4.9.9.
+tfds_nightly-4.9.9.dev202508140045.dist-info/licenses/AUTHORS,sha256=nvBG4WwfgjuOu1oZkuQKw9kg7X6rve679ObS-YDDmXg,309
+tfds_nightly-4.9.9.dev202508140045.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+tfds_nightly-4.9.9.dev202508140045.dist-info/METADATA,sha256=Fk8az7nti-M2Kanqf6ioLiS8DkCCYIo15CA-nz-BscM,11694
+tfds_nightly-4.9.9.dev202508140045.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+tfds_nightly-4.9.9.dev202508140045.dist-info/entry_points.txt,sha256=eHEL7nF5y1uCY2FgkuYIdE062epJXlAQTSdq89px4p4,73
+tfds_nightly-4.9.9.dev202508140045.dist-info/top_level.txt,sha256=bAevmk9209s_oxVZVlN6hSDIVS423qrMQvmcWSvW4do,20
+tfds_nightly-4.9.9.dev202508140045.dist-info/RECORD,,
{tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/WHEEL
RENAMED
File without changes
{tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/entry_points.txt
RENAMED
File without changes
{tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/licenses/AUTHORS
RENAMED
File without changes
{tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/licenses/LICENSE
RENAMED
File without changes
{tfds_nightly-4.9.9.dev202508120044.dist-info → tfds_nightly-4.9.9.dev202508140045.dist-info}/top_level.txt
RENAMED
File without changes