ob-metaflow-stubs 6.0.3.102rc1__py2.py3-none-any.whl → 6.0.3.102rc3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (140)
  1. metaflow-stubs/__init__.pyi +464 -464
  2. metaflow-stubs/cards.pyi +4 -4
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +2 -2
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +1 -1
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +1 -1
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +2 -2
  13. metaflow-stubs/info_file.pyi +1 -1
  14. metaflow-stubs/metadata/metadata.pyi +2 -2
  15. metaflow-stubs/metadata/util.pyi +1 -1
  16. metaflow-stubs/metaflow_config.pyi +1 -1
  17. metaflow-stubs/metaflow_current.pyi +45 -45
  18. metaflow-stubs/mflog/mflog.pyi +1 -1
  19. metaflow-stubs/multicore_utils.pyi +1 -1
  20. metaflow-stubs/parameters.pyi +3 -3
  21. metaflow-stubs/plugins/__init__.pyi +1 -1
  22. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  23. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  24. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  25. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  26. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  30. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  31. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  32. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +5 -5
  34. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +6 -6
  35. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  36. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  37. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  38. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  39. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  40. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  41. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  42. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  43. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  44. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  45. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  46. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  47. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +1 -1
  48. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  49. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  50. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  51. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  52. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  53. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  54. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  55. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  56. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  57. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  58. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  59. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  60. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  61. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  63. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  64. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  66. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  68. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  70. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  71. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  73. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  74. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  75. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  76. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  77. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  78. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  79. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  80. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  81. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  82. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  83. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  84. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  85. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  86. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  87. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  88. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  89. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  90. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  91. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  92. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  93. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  94. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  95. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  96. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  97. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  99. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  101. metaflow-stubs/plugins/logs_cli.pyi +2 -2
  102. metaflow-stubs/plugins/package_cli.pyi +1 -1
  103. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  104. metaflow-stubs/plugins/perimeters.pyi +1 -1
  105. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  106. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  107. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  108. metaflow-stubs/plugins/pypi/conda_environment.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  110. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  111. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  112. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  113. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  114. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  115. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  116. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  118. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  119. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  120. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  121. metaflow-stubs/procpoll.pyi +1 -1
  122. metaflow-stubs/profilers/__init__.pyi +1 -1
  123. metaflow-stubs/pylint_wrapper.pyi +1 -1
  124. metaflow-stubs/runner/__init__.pyi +1 -1
  125. metaflow-stubs/runner/deployer.pyi +3 -3
  126. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  127. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  128. metaflow-stubs/runner/nbrun.pyi +1 -1
  129. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  130. metaflow-stubs/runner/utils.pyi +1 -1
  131. metaflow-stubs/system/__init__.pyi +2 -2
  132. metaflow-stubs/system/system_logger.pyi +1 -1
  133. metaflow-stubs/system/system_monitor.pyi +1 -1
  134. metaflow-stubs/tagging_util.pyi +1 -1
  135. metaflow-stubs/tuple_util.pyi +1 -1
  136. {ob_metaflow_stubs-6.0.3.102rc1.dist-info → ob_metaflow_stubs-6.0.3.102rc3.dist-info}/METADATA +1 -1
  137. ob_metaflow_stubs-6.0.3.102rc3.dist-info/RECORD +140 -0
  138. ob_metaflow_stubs-6.0.3.102rc1.dist-info/RECORD +0 -140
  139. {ob_metaflow_stubs-6.0.3.102rc1.dist-info → ob_metaflow_stubs-6.0.3.102rc3.dist-info}/WHEEL +0 -0
  140. {ob_metaflow_stubs-6.0.3.102rc1.dist-info → ob_metaflow_stubs-6.0.3.102rc3.dist-info}/top_level.txt +0 -0
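The same comparison can be reproduced locally. Below is a hypothetical helper (not part of this package) that diffs one stub file across the two wheels; the wheel filenames and the chosen member path are assumptions based on the listing above.

```python
# Hypothetical local reproduction of the diff shown below, assuming both
# wheels have been downloaded next to this script. Wheels are plain zip
# archives, so zipfile + difflib is enough to compare a single member.
import difflib
import zipfile

OLD = "ob_metaflow_stubs-6.0.3.102rc1-py2.py3-none-any.whl"  # assumed filename
NEW = "ob_metaflow_stubs-6.0.3.102rc3-py2.py3-none-any.whl"  # assumed filename

def read_member(wheel: str, member: str) -> list[str]:
    # Read one file out of the wheel archive as a list of text lines.
    with zipfile.ZipFile(wheel) as zf:
        return zf.read(member).decode("utf-8").splitlines(keepends=True)

member = "metaflow-stubs/__init__.pyi"
diff = difflib.unified_diff(
    read_member(OLD, member), read_member(NEW, member),
    fromfile=f"{OLD}:{member}", tofile=f"{NEW}:{member}",
)
print("".join(diff))
```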
@@ -1,24 +1,24 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.12.22.1+obcheckpoint(0.0.10);ob(v1) #
- # Generated on 2024-09-20T18:51:04.453225 #
+ # Generated on 2024-09-20T19:02:23.973955 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow.client.core
- import metaflow.metaflow_current
- import metaflow.parameters
+ import metaflow.flowspec
+ import datetime
  import metaflow._vendor.click.types
- import metaflow_extensions.obcheckpoint.plugins.machine_learning_utilities.datastructures
- import metaflow.runner.metaflow_runner
- import metaflow.events
+ import metaflow.parameters
  import typing
- import datetime
  import metaflow.datastore.inputs
- import metaflow.flowspec
+ import metaflow.metaflow_current
+ import metaflow.events
+ import metaflow.runner.metaflow_runner
+ import metaflow_extensions.obcheckpoint.plugins.machine_learning_utilities.datastructures
+ import metaflow.client.core
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -434,128 +434,203 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies that this step should execute on Kubernetes.
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
+ or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the resources needed when executing this step.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the resources needed when executing this step.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

@@ -672,224 +747,59 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies a timeout for your step.

- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Internal decorator to support Fast bakery
- """
- ...
-
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Internal decorator to support Fast bakery
- """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
- or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- """
- ...
-
- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful if this step may hang indefinitely.

- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

@@ -978,79 +888,120 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies the resources needed when executing this step.
+ Specifies that the step will success under all circumstances.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

@@ -1115,45 +1066,52 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
  """
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

@@ -1206,6 +1164,48 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1206
1164
  """
1207
1165
  ...
1208
1166
 
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators; adding more than one ensures that the `start` step starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value), the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check whether the external task exists or whether
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
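Editor's note: a sketch of how this flow-level sensor might be attached is shown below. The DAG id, sensor name, and pool value are hypothetical placeholders, and the flow still has to be deployed with `airflow create` as the docstring notes.

    from metaflow import FlowSpec, airflow_external_task_sensor, step

    @airflow_external_task_sensor(
        timeout=3600,
        poke_interval=60,
        mode="reschedule",
        exponential_backoff=True,
        pool="default_pool",             # hypothetical: Airflow's default slot pool
        soft_fail=False,
        name="wait_for_upstream_etl",    # hypothetical sensor name
        description="Block start until the upstream ETL DAG succeeds",
        external_dag_id="upstream_etl",  # hypothetical DAG id
        external_task_ids=None,          # wait for the whole DAG (docstring default)
        allowed_states=["success"],
        failed_states=None,
        execution_delta=None,
        check_existence=True,
    )
    class SensorGatedFlow(FlowSpec):     # hypothetical flow name

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        SensorGatedFlow()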
+
  def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  This decorator is used to run NIM containers in Metaflow tasks as sidecars.
@@ -1237,6 +1237,112 @@ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[Fl
  """
  ...

+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
+ """
+ ...
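Editor's note: a brief sketch of the `@project` flow decorator in use; the project and flow names are illustrative assumptions chosen to satisfy the lowercase-and-underscores rule above.

    from metaflow import FlowSpec, project, step

    @project(name="fraud_detection")      # hypothetical project name
    class FraudTrainingFlow(FlowSpec):    # hypothetical flow name

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        FraudTrainingFlow()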
+
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
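Editor's note: a minimal sketch of `@schedule`. The cron expression follows the EventBridge-style format linked in the docstring and the timezone is an IANA name; both values are illustrative assumptions.

    from metaflow import FlowSpec, schedule, step

    # Nightly at 02:30 in the given timezone; the simpler forms are daily=True, hourly=True, weekly=True.
    @schedule(cron="30 2 * * ? *", timezone="Europe/Helsinki")
    class NightlyRefreshFlow(FlowSpec):   # hypothetical flow name

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        NightlyRefreshFlow()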
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
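Editor's note: the `@pypi_base` / `@pypi` split described above is easiest to see side by side. In this sketch the package names, versions, and Python pin are illustrative assumptions.

    from metaflow import FlowSpec, pypi, pypi_base, step

    @pypi_base(packages={"requests": "2.31.0"}, python="3.10.11")  # flow-wide pins (illustrative)
    class PinnedDepsFlow(FlowSpec):  # hypothetical flow name

        @step
        def start(self):
            import requests  # provided to every step by @pypi_base
            self.next(self.train)

        @pypi(packages={"scikit-learn": "1.4.0"})  # step-specific addition on top of the base set
        @step
        def train(self):
            import sklearn
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        PinnedDepsFlow()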
+
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1382,112 +1488,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
- """
- ...
-
  @typing.overload
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """