tfds-nightly 4.9.9.dev202508130045-py3-none-any.whl → 4.9.9.dev202508150045-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -33,42 +33,42 @@ from tensorflow_datasets.scripts.cli import cli_utils
 
 @dataclasses.dataclass(frozen=True, kw_only=True)
 class Args(cli_utils.Args):
-  """Commands for downloading and preparing datasets.
-
-  Attributes:
-    positional_datasets: Name(s) of the dataset(s) to build. Default to current
-      dir. See https://www.tensorflow.org/datasets/cli for accepted values.
-    datasets: Datasets can also be provided as keyword argument.
-    debug: Debug & tests options. Use --pdb to enter post-mortem debugging mode
-      if an exception is raised.
-    paths: Path options.
-    generation: Generation options.
-    publishing: Publishing options.
-    automation: Automation options.
-  """
+  """Commands for downloading and preparing datasets."""
 
-  positional_datasets: list[str] = simple_parsing.field(
+  # Name(s) of the dataset(s) to build. Default to current dir. See
+  # https://www.tensorflow.org/datasets/cli for accepted values.
+  positional_datasets: list[str] = simple_parsing.list_field(
       positional=True,
-      nargs='*',
-      default_factory=list,
       # Need to explicitly set metavar for command-line help.
       metavar='datasets',
   )
-  datasets: list[str] = simple_parsing.field(nargs='*', default_factory=list)
+
+  datasets: list[str] = simple_parsing.list_field(alias='--dataset')
+  """Datasets can also be provided as keyword argument."""
 
   debug: cli_utils.DebugOptions = cli_utils.DebugOptions()
+  """Debug & tests options. Use --pdb to enter post-mortem debugging mode if an
+  exception is raised."""
+
   paths: cli_utils.PathOptions = simple_parsing.field(
       default_factory=cli_utils.PathOptions
   )
+  """Path options."""
+
   generation: cli_utils.GenerationOptions = simple_parsing.field(
       default_factory=cli_utils.GenerationOptions
   )
+  """Generation options."""
+
   publishing: cli_utils.PublishingOptions = simple_parsing.field(
       default_factory=cli_utils.PublishingOptions
   )
+  """Publishing options."""
+
   automation: cli_utils.AutomationOptions = simple_parsing.field(
       default_factory=cli_utils.AutomationOptions
   )
+  """Automation options."""
 
   def execute(self) -> None:
     """Build the given datasets."""
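
Note: in `tensorflow_datasets/scripts/cli/build.py`, the hunk above replaces the class-level `Attributes:` docstring with per-field docstrings and comments, which `simple_parsing` turns into `--help` text, and switches the list fields to `simple_parsing.list_field`, which supplies the `nargs='*'` / `default_factory=list` behaviour that was previously spelled out. The `alias='--dataset'` keeps the singular flag name used by the old `download_and_prepare` wrapper working. A minimal sketch of the pattern, using a hypothetical `DemoArgs` dataclass rather than the TFDS classes:

    import dataclasses

    import simple_parsing


    @dataclasses.dataclass(frozen=True, kw_only=True)
    class DemoArgs:
      """Hypothetical stand-in for the Args dataclass above (not TFDS code)."""

      datasets: list[str] = simple_parsing.list_field(alias='--dataset')
      """Datasets can also be provided as keyword argument."""


    parser = simple_parsing.ArgumentParser()
    parser.add_arguments(DemoArgs, dest='args')

    # The field docstring becomes the flag's --help text; the alias means both
    # `--datasets mnist cifar10` and `--dataset mnist cifar10` should be accepted.
    parsed = parser.parse_args(['--dataset', 'mnist', 'cifar10'])
    print(parsed.args.datasets)  # expected: ['mnist', 'cifar10']
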
@@ -243,10 +243,14 @@ class GenerationOptions:
 
   download_only: bool = simple_parsing.flag(default=False)
   config: str | None = simple_parsing.field(default=None, alias='-c')
-  config_idx: int | None = None
+  config_idx: int | None = simple_parsing.field(
+      default=None, alias='--builder_config_id'
+  )
   update_metadata_only: bool = simple_parsing.flag(default=False)
   download_config: str | None = None
-  imports: str | None = simple_parsing.field(default=None, alias='-i')
+  imports: str | None = simple_parsing.field(
+      default=None, alias=['-i', '--module_import']
+  )
   register_checksums: bool = simple_parsing.flag(default=False)
   force_checksums_validation: bool = simple_parsing.flag(default=False)
   beam_pipeline_options: str | None = None
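
Note: in `tensorflow_datasets/scripts/cli/cli_utils.py`, `config_idx` and `imports` gain extra option spellings via `alias`, so the flag names the standalone `download_and_prepare` wrapper used to define itself (`--builder_config_id`, `--module_import`) now map directly onto the `GenerationOptions` fields; the next hunk, in `tensorflow_datasets/scripts/download_and_prepare.py`, removes that wrapper-side plumbing. A short sketch of the alias mechanism with a hypothetical dataclass (field names copied from the hunk, everything else illustrative):

    import dataclasses

    import simple_parsing


    @dataclasses.dataclass
    class DemoGenerationOptions:
      """Hypothetical stand-in for GenerationOptions (not TFDS code)."""

      # `alias` adds extra option strings that feed the same destination field.
      config_idx: int | None = simple_parsing.field(
          default=None, alias='--builder_config_id'
      )
      imports: str | None = simple_parsing.field(
          default=None, alias=['-i', '--module_import']
      )


    parser = simple_parsing.ArgumentParser()
    parser.add_arguments(DemoGenerationOptions, dest='generation')

    # Either spelling is expected to populate the same field:
    #   --config_idx 0        or  --builder_config_id 0
    #   --imports my.module   or  --module_import my.module  or  -i my.module
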
@@ -15,43 +15,21 @@
 
 r"""Wrapper around `tfds build`."""
 
-import typing
-
 from absl import app
 from absl import flags
-from absl import logging
-from tensorflow_datasets.scripts.cli import build
 from tensorflow_datasets.scripts.cli import main as main_cli
 
-module_import = flags.DEFINE_string('module_import', None, '`--imports` flag.')
-dataset = flags.DEFINE_string('dataset', None, 'singleton `--datasets` flag.')
-
-builder_config_id = flags.DEFINE_integer(
-    'builder_config_id', None, '`--config_idx` flag'
-)
-
-
 
 def _parse_flags(argv: list[str]) -> main_cli.Args:
   """Command lines flag parsing."""
   return main_cli._parse_flags([argv[0], 'build'] + argv[1:])  # pylint: disable=protected-access
 
 
-_display_warning = True
-
-
 def main(args: main_cli.Args) -> None:
-  if _display_warning:
-    logging.warning(
-        '***`tfds build` should be used instead of `download_and_prepare`.***'
-    )
-  cmd_args = typing.cast(build.Args, args.command)
-  if module_import.value:
-    cmd_args.generation.imports = module_import.value
-  if dataset.value:
-    cmd_args.datasets = [dataset.value]
-  if builder_config_id.value is not None:
-    cmd_args.generation.config_idx = builder_config_id.value
+  from absl import logging
+  logging.warning(
+      '***`tfds build` should be used instead of `download_and_prepare`.***'
+  )
   main_cli.main(args)
 
 
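
Note: the hunk above is `tensorflow_datasets/scripts/download_and_prepare.py` (its RECORD entry shrinks from 1875 to 1204 bytes below). With the aliases now declared on the dataclass fields, the wrapper no longer needs its own `--module_import`, `--dataset`, and `--builder_config_id` absl flags, the `typing.cast` to `build.Args`, or the hand-copied flag values: it just logs the deprecation warning (importing `absl.logging` inside `main`, now the only place it is used) and delegates to `main_cli.main`, which parses everything through the `build` command's dataclasses.
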
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tfds-nightly
-Version: 4.9.9.dev202508130045
+Version: 4.9.9.dev202508150045
 Summary: tensorflow/datasets is a library of datasets ready to use with TensorFlow.
 Home-page: https://github.com/tensorflow/datasets
 Download-URL: https://github.com/tensorflow/datasets/tags
@@ -1965,7 +1965,7 @@ tensorflow_datasets/robotics/rtx/__init__.py,sha256=T5AMbjr-iztrX4Q7k4QhiMNXLOAK
 tensorflow_datasets/robotics/rtx/rtx.py,sha256=8OEnc0_LNsgEJjaySoMwWDjzgiv4hzeobuploMM1cdo,50084
 tensorflow_datasets/scripts/__init__.py,sha256=Z8UWkv0wbzS4AzaLgSpYVGApYv5j57RWY0vN5Z553BQ,613
 tensorflow_datasets/scripts/convert_format.py,sha256=Kopn3YbNqH-euJaWFsd1nyo56-HDHgq8fDzRViXdx9A,3604
-tensorflow_datasets/scripts/download_and_prepare.py,sha256=LzbjSnFeo53r1D5oaRgTucHtJiabhBBYodmZsKBpt9s,1875
+tensorflow_datasets/scripts/download_and_prepare.py,sha256=3-waIxsFK9fE-O9TgjCRUVjl7jZWV7umEXCeH61DBlA,1204
 tensorflow_datasets/scripts/freeze_dataset_versions.py,sha256=SKC7raxmREqaD5pUnSuy_NHdu9gxTlRxJIOoPoT3cuw,1244
 tensorflow_datasets/scripts/print_num_configs.py,sha256=an80znBHmkycQS4ZEHFQTi1fuFop56tDUx9hgguVcvw,971
 tensorflow_datasets/scripts/replace_fake_images.py,sha256=9L2m3zY0nntaOmsVlNWy6BRJEEytyrMuu5W0LXzLCpA,5223
@@ -1979,11 +1979,11 @@ tensorflow_datasets/scripts/cleanup/refactor_dataset_as_folder.py,sha256=VpEc2Us
 tensorflow_datasets/scripts/cleanup/url_filename_recorder.py,sha256=iLcsT8UgbyNUw00N7bVBC0zCqEuIQ2ndeCCcb4B-OEc,4490
 tensorflow_datasets/scripts/cleanup/url_status_checker.py,sha256=Tr3LtLnGhI8ElDAS-ejmuAU3rs1lmqmYlU4figoVQg0,1967
 tensorflow_datasets/scripts/cli/__init__.py,sha256=Z8UWkv0wbzS4AzaLgSpYVGApYv5j57RWY0vN5Z553BQ,613
-tensorflow_datasets/scripts/cli/build.py,sha256=_YetKh9ZZJfo3w6brP5sdzsdCKfVM4HnQLUyX4mbrX4,15002
+tensorflow_datasets/scripts/cli/build.py,sha256=uBR2mPo1YO1Of83zZ6A3m5NU0GhP0nJdpLN23idsdx4,14877
 tensorflow_datasets/scripts/cli/build_test.py,sha256=K7ho7IRtAty1ZNPLj33Th_nZajYBkXRLA4u3dbElQmo,10615
 tensorflow_datasets/scripts/cli/builder_templates.py,sha256=99SvH3skigkc2Qg737BV2OzhXL_Rgu4az8eVHsxKCLk,7985
 tensorflow_datasets/scripts/cli/builder_templates_test.py,sha256=HBNB-v2zlImKULPI8Webs9hXCkeFmWT29urxav-tDe8,2062
-tensorflow_datasets/scripts/cli/cli_utils.py,sha256=sARBmqVP9W6FgTNTPcCN8rUpRqoOAd4WdMksBRnu1Tg,13307
+tensorflow_datasets/scripts/cli/cli_utils.py,sha256=6B3LLFwEvB41LLsQ5__el7F0Pd-COlLnaU3nvUTBdCw,13407
 tensorflow_datasets/scripts/cli/conftest.py,sha256=3PNh_BbR013G4HyLAZOleUXsQ9mICrD03NaKwdHFMXs,1291
 tensorflow_datasets/scripts/cli/convert_format.py,sha256=ZS7CmWJ-oZ0usO4TB8GKDj9TBJ5MyEO0I9QLRg7eQOw,3797
 tensorflow_datasets/scripts/cli/convert_format_utils.py,sha256=U_q5WVgMNrjBkOc166U4Y_eca5KOS3Xb3jSDjp4XdK4,29078
@@ -2468,10 +2468,10 @@ tensorflow_datasets/vision_language/wit/wit_test.py,sha256=PXS8DMNW-MDrT2p5oy4Ic
 tensorflow_datasets/vision_language/wit_kaggle/__init__.py,sha256=vGwSGeM8WE4Q-l0-eEE1sBojmk6YT0l1OO60AWa4Q40,719
 tensorflow_datasets/vision_language/wit_kaggle/wit_kaggle.py,sha256=q-vX_FBzIwsFxL4sY9vuyQ3UQD2PLM4yhUR4U6l-qao,16903
 tensorflow_datasets/vision_language/wit_kaggle/wit_kaggle_test.py,sha256=ZymHT1NkmD-pUnh3BmM3_g30c5afsWYnmqDD9dVyDSA,1778
-tfds_nightly-4.9.9.dev202508130045.dist-info/licenses/AUTHORS,sha256=nvBG4WwfgjuOu1oZkuQKw9kg7X6rve679ObS-YDDmXg,309
-tfds_nightly-4.9.9.dev202508130045.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-tfds_nightly-4.9.9.dev202508130045.dist-info/METADATA,sha256=MRLubuygIcfrej-GxBNv-7IT4Nyueo9Uqa-rh7TrfOQ,11694
-tfds_nightly-4.9.9.dev202508130045.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-tfds_nightly-4.9.9.dev202508130045.dist-info/entry_points.txt,sha256=eHEL7nF5y1uCY2FgkuYIdE062epJXlAQTSdq89px4p4,73
-tfds_nightly-4.9.9.dev202508130045.dist-info/top_level.txt,sha256=bAevmk9209s_oxVZVlN6hSDIVS423qrMQvmcWSvW4do,20
-tfds_nightly-4.9.9.dev202508130045.dist-info/RECORD,,
+tfds_nightly-4.9.9.dev202508150045.dist-info/licenses/AUTHORS,sha256=nvBG4WwfgjuOu1oZkuQKw9kg7X6rve679ObS-YDDmXg,309
+tfds_nightly-4.9.9.dev202508150045.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+tfds_nightly-4.9.9.dev202508150045.dist-info/METADATA,sha256=3GAb0FSJHgdUUFf1do-Od7H7EhdMdcIQhGjo-vNaTYk,11694
+tfds_nightly-4.9.9.dev202508150045.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+tfds_nightly-4.9.9.dev202508150045.dist-info/entry_points.txt,sha256=eHEL7nF5y1uCY2FgkuYIdE062epJXlAQTSdq89px4p4,73
+tfds_nightly-4.9.9.dev202508150045.dist-info/top_level.txt,sha256=bAevmk9209s_oxVZVlN6hSDIVS423qrMQvmcWSvW4do,20
+tfds_nightly-4.9.9.dev202508150045.dist-info/RECORD,,