metaflow-stubs 2.15.6__py2.py3-none-any.whl → 2.15.8__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (149) hide show
  1. metaflow-stubs/__init__.pyi +449 -448
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +4 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +23 -23
  21. metaflow-stubs/metaflow_git.pyi +26 -0
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/parameters.pyi +4 -4
  24. metaflow-stubs/plugins/__init__.pyi +13 -12
  25. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  33. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_workflows.pyi +9 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  39. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  40. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  41. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  42. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  49. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  58. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  62. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  63. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  64. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  65. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  72. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  78. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  80. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  84. metaflow-stubs/plugins/datatools/s3/s3.pyi +7 -5
  85. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  86. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  87. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  88. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  89. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  95. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  107. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  110. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  112. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  116. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  119. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  120. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  122. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/uv/__init__.pyi +11 -0
  125. metaflow-stubs/plugins/uv/uv_environment.pyi +36 -0
  126. metaflow-stubs/pylint_wrapper.pyi +2 -2
  127. metaflow-stubs/runner/__init__.pyi +2 -2
  128. metaflow-stubs/runner/deployer.pyi +29 -29
  129. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  130. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  131. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  132. metaflow-stubs/runner/nbrun.pyi +2 -2
  133. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  134. metaflow-stubs/runner/utils.pyi +4 -4
  135. metaflow-stubs/system/__init__.pyi +2 -2
  136. metaflow-stubs/system/system_logger.pyi +2 -2
  137. metaflow-stubs/system/system_monitor.pyi +2 -2
  138. metaflow-stubs/tagging_util.pyi +2 -2
  139. metaflow-stubs/tuple_util.pyi +2 -2
  140. metaflow-stubs/user_configs/__init__.pyi +2 -2
  141. metaflow-stubs/user_configs/config_decorators.pyi +6 -6
  142. metaflow-stubs/user_configs/config_options.pyi +3 -3
  143. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  144. metaflow-stubs/version.pyi +2 -2
  145. {metaflow_stubs-2.15.6.dist-info → metaflow_stubs-2.15.8.dist-info}/METADATA +3 -3
  146. metaflow_stubs-2.15.8.dist-info/RECORD +149 -0
  147. {metaflow_stubs-2.15.6.dist-info → metaflow_stubs-2.15.8.dist-info}/WHEEL +1 -1
  148. metaflow_stubs-2.15.6.dist-info/RECORD +0 -146
  149. {metaflow_stubs-2.15.6.dist-info → metaflow_stubs-2.15.8.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.15.6 #
4
- # Generated on 2025-03-17T21:08:12.538673 #
3
+ # MF version: 2.15.8 #
4
+ # Generated on 2025-04-17T19:00:19.966511 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import typing
12
11
  import datetime
12
+ import typing
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -36,15 +36,16 @@ from .user_configs.config_parameters import config_expr as config_expr
36
36
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
37
37
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
38
38
  from . import tuple_util as tuple_util
39
+ from . import metaflow_git as metaflow_git
39
40
  from . import events as events
40
41
  from . import runner as runner
41
42
  from . import plugins as plugins
42
43
  from .plugins.datatools.s3.s3 import S3 as S3
43
44
  from . import includefile as includefile
44
45
  from .includefile import IncludeFile as IncludeFile
45
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
46
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
47
46
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
47
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
48
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
48
49
  from . import cards as cards
49
50
  from . import client as client
50
51
  from .client.core import namespace as namespace
@@ -145,6 +146,57 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
145
146
  """
146
147
  ...
147
148
 
149
+ @typing.overload
150
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
151
+ """
152
+ Specifies that the step will success under all circumstances.
153
+
154
+ The decorator will create an optional artifact, specified by `var`, which
155
+ contains the exception raised. You can use it to detect the presence
156
+ of errors, indicating that all happy-path artifacts produced by the step
157
+ are missing.
158
+
159
+
160
+ Parameters
161
+ ----------
162
+ var : str, optional, default None
163
+ Name of the artifact in which to store the caught exception.
164
+ If not specified, the exception is not stored.
165
+ print_exception : bool, default True
166
+ Determines whether or not the exception is printed to
167
+ stdout when caught.
168
+ """
169
+ ...
170
+
171
+ @typing.overload
172
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
173
+ ...
174
+
175
+ @typing.overload
176
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
177
+ ...
178
+
179
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
180
+ """
181
+ Specifies that the step will success under all circumstances.
182
+
183
+ The decorator will create an optional artifact, specified by `var`, which
184
+ contains the exception raised. You can use it to detect the presence
185
+ of errors, indicating that all happy-path artifacts produced by the step
186
+ are missing.
187
+
188
+
189
+ Parameters
190
+ ----------
191
+ var : str, optional, default None
192
+ Name of the artifact in which to store the caught exception.
193
+ If not specified, the exception is not stored.
194
+ print_exception : bool, default True
195
+ Determines whether or not the exception is printed to
196
+ stdout when caught.
197
+ """
198
+ ...
199
+
148
200
  @typing.overload
149
201
  def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
150
202
  """
@@ -349,6 +401,142 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
349
401
  """
350
402
  ...
351
403
 
404
+ @typing.overload
405
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
406
+ """
407
+ Specifies the Conda environment for the step.
408
+
409
+ Information in this decorator will augment any
410
+ attributes set in the `@conda_base` flow-level decorator. Hence,
411
+ you can use `@conda_base` to set packages required by all
412
+ steps and use `@conda` to specify step-specific overrides.
413
+
414
+
415
+ Parameters
416
+ ----------
417
+ packages : Dict[str, str], default {}
418
+ Packages to use for this step. The key is the name of the package
419
+ and the value is the version to use.
420
+ libraries : Dict[str, str], default {}
421
+ Supported for backward compatibility. When used with packages, packages will take precedence.
422
+ python : str, optional, default None
423
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
424
+ that the version used will correspond to the version of the Python interpreter used to start the run.
425
+ disabled : bool, default False
426
+ If set to True, disables @conda.
427
+ """
428
+ ...
429
+
430
+ @typing.overload
431
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
432
+ ...
433
+
434
+ @typing.overload
435
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
436
+ ...
437
+
438
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
439
+ """
440
+ Specifies the Conda environment for the step.
441
+
442
+ Information in this decorator will augment any
443
+ attributes set in the `@conda_base` flow-level decorator. Hence,
444
+ you can use `@conda_base` to set packages required by all
445
+ steps and use `@conda` to specify step-specific overrides.
446
+
447
+
448
+ Parameters
449
+ ----------
450
+ packages : Dict[str, str], default {}
451
+ Packages to use for this step. The key is the name of the package
452
+ and the value is the version to use.
453
+ libraries : Dict[str, str], default {}
454
+ Supported for backward compatibility. When used with packages, packages will take precedence.
455
+ python : str, optional, default None
456
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
457
+ that the version used will correspond to the version of the Python interpreter used to start the run.
458
+ disabled : bool, default False
459
+ If set to True, disables @conda.
460
+ """
461
+ ...
462
+
463
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
464
+ """
465
+ Specifies that this step should execute on Kubernetes.
466
+
467
+
468
+ Parameters
469
+ ----------
470
+ cpu : int, default 1
471
+ Number of CPUs required for this step. If `@resources` is
472
+ also present, the maximum value from all decorators is used.
473
+ memory : int, default 4096
474
+ Memory size (in MB) required for this step. If
475
+ `@resources` is also present, the maximum value from all decorators is
476
+ used.
477
+ disk : int, default 10240
478
+ Disk size (in MB) required for this step. If
479
+ `@resources` is also present, the maximum value from all decorators is
480
+ used.
481
+ image : str, optional, default None
482
+ Docker image to use when launching on Kubernetes. If not specified, and
483
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
484
+ not, a default Docker image mapping to the current version of Python is used.
485
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
486
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
487
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
488
+ Kubernetes service account to use when launching pod in Kubernetes.
489
+ secrets : List[str], optional, default None
490
+ Kubernetes secrets to use when launching pod in Kubernetes. These
491
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
492
+ in Metaflow configuration.
493
+ node_selector: Union[Dict[str,str], str], optional, default None
494
+ Kubernetes node selector(s) to apply to the pod running the task.
495
+ Can be passed in as a comma separated string of values e.g.
496
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
497
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
498
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
499
+ Kubernetes namespace to use when launching pod in Kubernetes.
500
+ gpu : int, optional, default None
501
+ Number of GPUs required for this step. A value of zero implies that
502
+ the scheduled node should not have GPUs.
503
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
504
+ The vendor of the GPUs to be used for this step.
505
+ tolerations : List[str], default []
506
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
507
+ Kubernetes tolerations to use when launching pod in Kubernetes.
508
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
509
+ Kubernetes labels to use when launching pod in Kubernetes.
510
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
511
+ Kubernetes annotations to use when launching pod in Kubernetes.
512
+ use_tmpfs : bool, default False
513
+ This enables an explicit tmpfs mount for this step.
514
+ tmpfs_tempdir : bool, default True
515
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
516
+ tmpfs_size : int, optional, default: None
517
+ The value for the size (in MiB) of the tmpfs mount for this step.
518
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
519
+ memory allocated for this step.
520
+ tmpfs_path : str, optional, default /metaflow_temp
521
+ Path to tmpfs mount for this step.
522
+ persistent_volume_claims : Dict[str, str], optional, default None
523
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
524
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
525
+ shared_memory: int, optional
526
+ Shared memory size (in MiB) required for this step
527
+ port: int, optional
528
+ Port number to specify in the Kubernetes job object
529
+ compute_pool : str, optional, default None
530
+ Compute pool to be used for for this step.
531
+ If not specified, any accessible compute pool within the perimeter is used.
532
+ hostname_resolution_timeout: int, default 10 * 60
533
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
534
+ Only applicable when @parallel is used.
535
+ qos: str, default: Burstable
536
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
537
+ """
538
+ ...
539
+
352
540
  @typing.overload
353
541
  def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
354
542
  """
@@ -408,57 +596,6 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
408
596
  """
409
597
  ...
410
598
 
411
- @typing.overload
412
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
413
- """
414
- Specifies the PyPI packages for the step.
415
-
416
- Information in this decorator will augment any
417
- attributes set in the `@pyi_base` flow-level decorator. Hence,
418
- you can use `@pypi_base` to set packages required by all
419
- steps and use `@pypi` to specify step-specific overrides.
420
-
421
-
422
- Parameters
423
- ----------
424
- packages : Dict[str, str], default: {}
425
- Packages to use for this step. The key is the name of the package
426
- and the value is the version to use.
427
- python : str, optional, default: None
428
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
429
- that the version used will correspond to the version of the Python interpreter used to start the run.
430
- """
431
- ...
432
-
433
- @typing.overload
434
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
435
- ...
436
-
437
- @typing.overload
438
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
439
- ...
440
-
441
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
442
- """
443
- Specifies the PyPI packages for the step.
444
-
445
- Information in this decorator will augment any
446
- attributes set in the `@pyi_base` flow-level decorator. Hence,
447
- you can use `@pypi_base` to set packages required by all
448
- steps and use `@pypi` to specify step-specific overrides.
449
-
450
-
451
- Parameters
452
- ----------
453
- packages : Dict[str, str], default: {}
454
- Packages to use for this step. The key is the name of the package
455
- and the value is the version to use.
456
- python : str, optional, default: None
457
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
458
- that the version used will correspond to the version of the Python interpreter used to start the run.
459
- """
460
- ...
461
-
462
599
  @typing.overload
463
600
  def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
464
601
  """
@@ -495,142 +632,11 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
495
632
  ...
496
633
 
497
634
  @typing.overload
498
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
635
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
499
636
  """
500
- Specifies environment variables to be set prior to the execution of a step.
501
-
502
-
503
- Parameters
504
- ----------
505
- vars : Dict[str, str], default {}
506
- Dictionary of environment variables to set.
507
- """
508
- ...
509
-
510
- @typing.overload
511
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
512
- ...
513
-
514
- @typing.overload
515
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
516
- ...
517
-
518
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
519
- """
520
- Specifies environment variables to be set prior to the execution of a step.
521
-
522
-
523
- Parameters
524
- ----------
525
- vars : Dict[str, str], default {}
526
- Dictionary of environment variables to set.
527
- """
528
- ...
529
-
530
- @typing.overload
531
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
532
- """
533
- Decorator prototype for all step decorators. This function gets specialized
534
- and imported for all decorators types by _import_plugin_decorators().
535
- """
536
- ...
537
-
538
- @typing.overload
539
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
540
- ...
541
-
542
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
543
- """
544
- Decorator prototype for all step decorators. This function gets specialized
545
- and imported for all decorators types by _import_plugin_decorators().
546
- """
547
- ...
548
-
549
- @typing.overload
550
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
551
- """
552
- Specifies the resources needed when executing this step.
553
-
554
- Use `@resources` to specify the resource requirements
555
- independently of the specific compute layer (`@batch`, `@kubernetes`).
556
-
557
- You can choose the compute layer on the command line by executing e.g.
558
- ```
559
- python myflow.py run --with batch
560
- ```
561
- or
562
- ```
563
- python myflow.py run --with kubernetes
564
- ```
565
- which executes the flow on the desired system using the
566
- requirements specified in `@resources`.
567
-
568
-
569
- Parameters
570
- ----------
571
- cpu : int, default 1
572
- Number of CPUs required for this step.
573
- gpu : int, optional, default None
574
- Number of GPUs required for this step.
575
- disk : int, optional, default None
576
- Disk size (in MB) required for this step. Only applies on Kubernetes.
577
- memory : int, default 4096
578
- Memory size (in MB) required for this step.
579
- shared_memory : int, optional, default None
580
- The value for the size (in MiB) of the /dev/shm volume for this step.
581
- This parameter maps to the `--shm-size` option in Docker.
582
- """
583
- ...
584
-
585
- @typing.overload
586
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
587
- ...
588
-
589
- @typing.overload
590
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
591
- ...
592
-
593
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
594
- """
595
- Specifies the resources needed when executing this step.
596
-
597
- Use `@resources` to specify the resource requirements
598
- independently of the specific compute layer (`@batch`, `@kubernetes`).
599
-
600
- You can choose the compute layer on the command line by executing e.g.
601
- ```
602
- python myflow.py run --with batch
603
- ```
604
- or
605
- ```
606
- python myflow.py run --with kubernetes
607
- ```
608
- which executes the flow on the desired system using the
609
- requirements specified in `@resources`.
610
-
611
-
612
- Parameters
613
- ----------
614
- cpu : int, default 1
615
- Number of CPUs required for this step.
616
- gpu : int, optional, default None
617
- Number of GPUs required for this step.
618
- disk : int, optional, default None
619
- Disk size (in MB) required for this step. Only applies on Kubernetes.
620
- memory : int, default 4096
621
- Memory size (in MB) required for this step.
622
- shared_memory : int, optional, default None
623
- The value for the size (in MiB) of the /dev/shm volume for this step.
624
- This parameter maps to the `--shm-size` option in Docker.
625
- """
626
- ...
627
-
628
- @typing.overload
629
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
630
- """
631
- Creates a human-readable report, a Metaflow Card, after this step completes.
632
-
633
- Note that you may add multiple `@card` decorators in a step with different parameters.
637
+ Creates a human-readable report, a Metaflow Card, after this step completes.
638
+
639
+ Note that you may add multiple `@card` decorators in a step with different parameters.
634
640
 
635
641
 
636
642
  Parameters
@@ -675,232 +681,184 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
675
681
  ...
676
682
 
677
683
  @typing.overload
678
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
684
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
679
685
  """
680
- Specifies the Conda environment for the step.
686
+ Decorator prototype for all step decorators. This function gets specialized
687
+ and imported for all decorators types by _import_plugin_decorators().
688
+ """
689
+ ...
690
+
691
+ @typing.overload
692
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
693
+ ...
694
+
695
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
696
+ """
697
+ Decorator prototype for all step decorators. This function gets specialized
698
+ and imported for all decorators types by _import_plugin_decorators().
699
+ """
700
+ ...
701
+
702
+ @typing.overload
703
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
704
+ """
705
+ Specifies the PyPI packages for the step.
681
706
 
682
707
  Information in this decorator will augment any
683
- attributes set in the `@conda_base` flow-level decorator. Hence,
684
- you can use `@conda_base` to set packages required by all
685
- steps and use `@conda` to specify step-specific overrides.
708
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
709
+ you can use `@pypi_base` to set packages required by all
710
+ steps and use `@pypi` to specify step-specific overrides.
686
711
 
687
712
 
688
713
  Parameters
689
714
  ----------
690
- packages : Dict[str, str], default {}
715
+ packages : Dict[str, str], default: {}
691
716
  Packages to use for this step. The key is the name of the package
692
717
  and the value is the version to use.
693
- libraries : Dict[str, str], default {}
694
- Supported for backward compatibility. When used with packages, packages will take precedence.
695
- python : str, optional, default None
718
+ python : str, optional, default: None
696
719
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
697
720
  that the version used will correspond to the version of the Python interpreter used to start the run.
698
- disabled : bool, default False
699
- If set to True, disables @conda.
700
721
  """
701
722
  ...
702
723
 
703
724
  @typing.overload
704
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
725
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
705
726
  ...
706
727
 
707
728
  @typing.overload
708
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
729
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
709
730
  ...
710
731
 
711
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
732
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
712
733
  """
713
- Specifies the Conda environment for the step.
734
+ Specifies the PyPI packages for the step.
714
735
 
715
736
  Information in this decorator will augment any
716
- attributes set in the `@conda_base` flow-level decorator. Hence,
717
- you can use `@conda_base` to set packages required by all
718
- steps and use `@conda` to specify step-specific overrides.
737
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
738
+ you can use `@pypi_base` to set packages required by all
739
+ steps and use `@pypi` to specify step-specific overrides.
719
740
 
720
741
 
721
742
  Parameters
722
743
  ----------
723
- packages : Dict[str, str], default {}
744
+ packages : Dict[str, str], default: {}
724
745
  Packages to use for this step. The key is the name of the package
725
746
  and the value is the version to use.
726
- libraries : Dict[str, str], default {}
727
- Supported for backward compatibility. When used with packages, packages will take precedence.
728
- python : str, optional, default None
747
+ python : str, optional, default: None
729
748
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
730
749
  that the version used will correspond to the version of the Python interpreter used to start the run.
731
- disabled : bool, default False
732
- If set to True, disables @conda.
733
750
  """
734
751
  ...
735
752
 
736
753
  @typing.overload
737
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
754
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
738
755
  """
739
- Specifies that the step will success under all circumstances.
740
-
741
- The decorator will create an optional artifact, specified by `var`, which
742
- contains the exception raised. You can use it to detect the presence
743
- of errors, indicating that all happy-path artifacts produced by the step
744
- are missing.
756
+ Specifies environment variables to be set prior to the execution of a step.
745
757
 
746
758
 
747
759
  Parameters
748
760
  ----------
749
- var : str, optional, default None
750
- Name of the artifact in which to store the caught exception.
751
- If not specified, the exception is not stored.
752
- print_exception : bool, default True
753
- Determines whether or not the exception is printed to
754
- stdout when caught.
761
+ vars : Dict[str, str], default {}
762
+ Dictionary of environment variables to set.
755
763
  """
756
764
  ...
757
765
 
758
766
  @typing.overload
759
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
767
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
760
768
  ...
761
769
 
762
770
  @typing.overload
763
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
771
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
764
772
  ...
765
773
 
766
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
774
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
767
775
  """
768
- Specifies that the step will success under all circumstances.
769
-
770
- The decorator will create an optional artifact, specified by `var`, which
771
- contains the exception raised. You can use it to detect the presence
772
- of errors, indicating that all happy-path artifacts produced by the step
773
- are missing.
776
+ Specifies environment variables to be set prior to the execution of a step.
774
777
 
775
778
 
776
779
  Parameters
777
780
  ----------
778
- var : str, optional, default None
779
- Name of the artifact in which to store the caught exception.
780
- If not specified, the exception is not stored.
781
- print_exception : bool, default True
782
- Determines whether or not the exception is printed to
783
- stdout when caught.
781
+ vars : Dict[str, str], default {}
782
+ Dictionary of environment variables to set.
784
783
  """
785
784
  ...
786
785
 
787
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
786
+ @typing.overload
787
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
788
788
  """
789
- Specifies that this step should execute on Kubernetes.
789
+ Specifies the resources needed when executing this step.
790
+
791
+ Use `@resources` to specify the resource requirements
792
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
793
+
794
+ You can choose the compute layer on the command line by executing e.g.
795
+ ```
796
+ python myflow.py run --with batch
797
+ ```
798
+ or
799
+ ```
800
+ python myflow.py run --with kubernetes
801
+ ```
802
+ which executes the flow on the desired system using the
803
+ requirements specified in `@resources`.
790
804
 
791
805
 
792
806
  Parameters
793
807
  ----------
794
808
  cpu : int, default 1
795
- Number of CPUs required for this step. If `@resources` is
796
- also present, the maximum value from all decorators is used.
797
- memory : int, default 4096
798
- Memory size (in MB) required for this step. If
799
- `@resources` is also present, the maximum value from all decorators is
800
- used.
801
- disk : int, default 10240
802
- Disk size (in MB) required for this step. If
803
- `@resources` is also present, the maximum value from all decorators is
804
- used.
805
- image : str, optional, default None
806
- Docker image to use when launching on Kubernetes. If not specified, and
807
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
808
- not, a default Docker image mapping to the current version of Python is used.
809
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
810
- If given, the imagePullPolicy to be applied to the Docker image of the step.
811
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
812
- Kubernetes service account to use when launching pod in Kubernetes.
813
- secrets : List[str], optional, default None
814
- Kubernetes secrets to use when launching pod in Kubernetes. These
815
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
816
- in Metaflow configuration.
817
- node_selector: Union[Dict[str,str], str], optional, default None
818
- Kubernetes node selector(s) to apply to the pod running the task.
819
- Can be passed in as a comma separated string of values e.g.
820
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
821
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
822
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
823
- Kubernetes namespace to use when launching pod in Kubernetes.
809
+ Number of CPUs required for this step.
824
810
  gpu : int, optional, default None
825
- Number of GPUs required for this step. A value of zero implies that
826
- the scheduled node should not have GPUs.
827
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
828
- The vendor of the GPUs to be used for this step.
829
- tolerations : List[str], default []
830
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
831
- Kubernetes tolerations to use when launching pod in Kubernetes.
832
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
833
- Kubernetes labels to use when launching pod in Kubernetes.
834
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
835
- Kubernetes annotations to use when launching pod in Kubernetes.
836
- use_tmpfs : bool, default False
837
- This enables an explicit tmpfs mount for this step.
838
- tmpfs_tempdir : bool, default True
839
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
840
- tmpfs_size : int, optional, default: None
841
- The value for the size (in MiB) of the tmpfs mount for this step.
842
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
843
- memory allocated for this step.
844
- tmpfs_path : str, optional, default /metaflow_temp
845
- Path to tmpfs mount for this step.
846
- persistent_volume_claims : Dict[str, str], optional, default None
847
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
848
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
849
- shared_memory: int, optional
850
- Shared memory size (in MiB) required for this step
851
- port: int, optional
852
- Port number to specify in the Kubernetes job object
853
- compute_pool : str, optional, default None
854
- Compute pool to be used for for this step.
855
- If not specified, any accessible compute pool within the perimeter is used.
856
- hostname_resolution_timeout: int, default 10 * 60
857
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
858
- Only applicable when @parallel is used.
859
- qos: str, default: Burstable
860
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
811
+ Number of GPUs required for this step.
812
+ disk : int, optional, default None
813
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
814
+ memory : int, default 4096
815
+ Memory size (in MB) required for this step.
816
+ shared_memory : int, optional, default None
817
+ The value for the size (in MiB) of the /dev/shm volume for this step.
818
+ This parameter maps to the `--shm-size` option in Docker.
861
819
  """
862
820
  ...
863
821
 
864
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
822
+ @typing.overload
823
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
824
+ ...
825
+
826
+ @typing.overload
827
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
828
+ ...
829
+
830
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
865
831
  """
866
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
867
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
832
+ Specifies the resources needed when executing this step.
833
+
834
+ Use `@resources` to specify the resource requirements
835
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
836
+
837
+ You can choose the compute layer on the command line by executing e.g.
838
+ ```
839
+ python myflow.py run --with batch
840
+ ```
841
+ or
842
+ ```
843
+ python myflow.py run --with kubernetes
844
+ ```
845
+ which executes the flow on the desired system using the
846
+ requirements specified in `@resources`.
868
847
 
869
848
 
870
849
  Parameters
871
850
  ----------
872
- timeout : int
873
- Time, in seconds before the task times out and fails. (Default: 3600)
874
- poke_interval : int
875
- Time in seconds that the job should wait in between each try. (Default: 60)
876
- mode : str
877
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
878
- exponential_backoff : bool
879
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
880
- pool : str
881
- the slot pool this task should run in,
882
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
883
- soft_fail : bool
884
- Set to true to mark the task as SKIPPED on failure. (Default: False)
885
- name : str
886
- Name of the sensor on Airflow
887
- description : str
888
- Description of sensor in the Airflow UI
889
- external_dag_id : str
890
- The dag_id that contains the task you want to wait for.
891
- external_task_ids : List[str]
892
- The list of task_ids that you want to wait for.
893
- If None (default value) the sensor waits for the DAG. (Default: None)
894
- allowed_states : List[str]
895
- Iterable of allowed states, (Default: ['success'])
896
- failed_states : List[str]
897
- Iterable of failed or dis-allowed states. (Default: None)
898
- execution_delta : datetime.timedelta
899
- time difference with the previous execution to look at,
900
- the default is the same logical date as the current task or DAG. (Default: None)
901
- check_existence: bool
902
- Set to True to check if the external task exists or check if
903
- the DAG to wait for exists. (Default: True)
851
+ cpu : int, default 1
852
+ Number of CPUs required for this step.
853
+ gpu : int, optional, default None
854
+ Number of GPUs required for this step.
855
+ disk : int, optional, default None
856
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
857
+ memory : int, default 4096
858
+ Memory size (in MB) required for this step.
859
+ shared_memory : int, optional, default None
860
+ The value for the size (in MiB) of the /dev/shm volume for this step.
861
+ This parameter maps to the `--shm-size` option in Docker.
904
862
  """
905
863
  ...
906
864
 
@@ -940,94 +898,53 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
940
898
  ...
941
899
 
942
900
  @typing.overload
943
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
944
- """
945
- Specifies the PyPI packages for all steps of the flow.
946
-
947
- Use `@pypi_base` to set common packages required by all
948
- steps and use `@pypi` to specify step-specific overrides.
949
-
950
- Parameters
951
- ----------
952
- packages : Dict[str, str], default: {}
953
- Packages to use for this flow. The key is the name of the package
954
- and the value is the version to use.
955
- python : str, optional, default: None
956
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
957
- that the version used will correspond to the version of the Python interpreter used to start the run.
958
- """
959
- ...
960
-
961
- @typing.overload
962
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
963
- ...
964
-
965
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
966
- """
967
- Specifies the PyPI packages for all steps of the flow.
968
-
969
- Use `@pypi_base` to set common packages required by all
970
- steps and use `@pypi` to specify step-specific overrides.
971
-
972
- Parameters
973
- ----------
974
- packages : Dict[str, str], default: {}
975
- Packages to use for this flow. The key is the name of the package
976
- and the value is the version to use.
977
- python : str, optional, default: None
978
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
979
- that the version used will correspond to the version of the Python interpreter used to start the run.
980
- """
981
- ...
982
-
983
- @typing.overload
984
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
901
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
985
902
  """
986
- Specifies the Conda environment for all steps of the flow.
987
-
988
- Use `@conda_base` to set common libraries required by all
989
- steps and use `@conda` to specify step-specific additions.
903
+ Specifies the times when the flow should be run when running on a
904
+ production scheduler.
990
905
 
991
906
 
992
907
  Parameters
993
908
  ----------
994
- packages : Dict[str, str], default {}
995
- Packages to use for this flow. The key is the name of the package
996
- and the value is the version to use.
997
- libraries : Dict[str, str], default {}
998
- Supported for backward compatibility. When used with packages, packages will take precedence.
999
- python : str, optional, default None
1000
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1001
- that the version used will correspond to the version of the Python interpreter used to start the run.
1002
- disabled : bool, default False
1003
- If set to True, disables Conda.
909
+ hourly : bool, default False
910
+ Run the workflow hourly.
911
+ daily : bool, default True
912
+ Run the workflow daily.
913
+ weekly : bool, default False
914
+ Run the workflow weekly.
915
+ cron : str, optional, default None
916
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
917
+ specified by this expression.
918
+ timezone : str, optional, default None
919
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
920
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1004
921
  """
1005
922
  ...
1006
923
 
1007
924
  @typing.overload
1008
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
925
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1009
926
  ...
1010
927
 
1011
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
928
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1012
929
  """
1013
- Specifies the Conda environment for all steps of the flow.
1014
-
1015
- Use `@conda_base` to set common libraries required by all
1016
- steps and use `@conda` to specify step-specific additions.
930
+ Specifies the times when the flow should be run when running on a
931
+ production scheduler.
1017
932
 
1018
933
 
1019
934
  Parameters
1020
935
  ----------
1021
- packages : Dict[str, str], default {}
1022
- Packages to use for this flow. The key is the name of the package
1023
- and the value is the version to use.
1024
- libraries : Dict[str, str], default {}
1025
- Supported for backward compatibility. When used with packages, packages will take precedence.
1026
- python : str, optional, default None
1027
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1028
- that the version used will correspond to the version of the Python interpreter used to start the run.
1029
- disabled : bool, default False
1030
- If set to True, disables Conda.
936
+ hourly : bool, default False
937
+ Run the workflow hourly.
938
+ daily : bool, default True
939
+ Run the workflow daily.
940
+ weekly : bool, default False
941
+ Run the workflow weekly.
942
+ cron : str, optional, default None
943
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
944
+ specified by this expression.
945
+ timezone : str, optional, default None
946
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
947
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1031
948
  """
1032
949
  ...
1033
950
 
@@ -1124,6 +1041,57 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1124
1041
  """
1125
1042
  ...
1126
1043
 
1044
+ @typing.overload
1045
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1046
+ """
1047
+ Specifies the Conda environment for all steps of the flow.
1048
+
1049
+ Use `@conda_base` to set common libraries required by all
1050
+ steps and use `@conda` to specify step-specific additions.
1051
+
1052
+
1053
+ Parameters
1054
+ ----------
1055
+ packages : Dict[str, str], default {}
1056
+ Packages to use for this flow. The key is the name of the package
1057
+ and the value is the version to use.
1058
+ libraries : Dict[str, str], default {}
1059
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1060
+ python : str, optional, default None
1061
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1062
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1063
+ disabled : bool, default False
1064
+ If set to True, disables Conda.
1065
+ """
1066
+ ...
1067
+
1068
+ @typing.overload
1069
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1070
+ ...
1071
+
1072
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1073
+ """
1074
+ Specifies the Conda environment for all steps of the flow.
1075
+
1076
+ Use `@conda_base` to set common libraries required by all
1077
+ steps and use `@conda` to specify step-specific additions.
1078
+
1079
+
1080
+ Parameters
1081
+ ----------
1082
+ packages : Dict[str, str], default {}
1083
+ Packages to use for this flow. The key is the name of the package
1084
+ and the value is the version to use.
1085
+ libraries : Dict[str, str], default {}
1086
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1087
+ python : str, optional, default None
1088
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1089
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1090
+ disabled : bool, default False
1091
+ If set to True, disables Conda.
1092
+ """
1093
+ ...
1094
+
1127
1095
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1128
1096
  """
1129
1097
  The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1268,54 +1236,87 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1268
1236
  """
1269
1237
  ...
1270
1238
 
1239
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1240
+ """
1241
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1242
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1243
+
1244
+
1245
+ Parameters
1246
+ ----------
1247
+ timeout : int
1248
+ Time, in seconds before the task times out and fails. (Default: 3600)
1249
+ poke_interval : int
1250
+ Time in seconds that the job should wait in between each try. (Default: 60)
1251
+ mode : str
1252
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1253
+ exponential_backoff : bool
1254
+ allow progressively longer waits between pokes by using the exponential backoff algorithm. (Default: True)
1255
+ pool : str
1256
+ the slot pool this task should run in,
1257
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1258
+ soft_fail : bool
1259
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1260
+ name : str
1261
+ Name of the sensor on Airflow
1262
+ description : str
1263
+ Description of sensor in the Airflow UI
1264
+ external_dag_id : str
1265
+ The dag_id that contains the task you want to wait for.
1266
+ external_task_ids : List[str]
1267
+ The list of task_ids that you want to wait for.
1268
+ If None (default value) the sensor waits for the DAG. (Default: None)
1269
+ allowed_states : List[str]
1270
+ Iterable of allowed states, (Default: ['success'])
1271
+ failed_states : List[str]
1272
+ Iterable of failed or dis-allowed states. (Default: None)
1273
+ execution_delta : datetime.timedelta
1274
+ time difference with the previous execution to look at,
1275
+ the default is the same logical date as the current task or DAG. (Default: None)
1276
+ check_existence: bool
1277
+ Set to True to check if the external task exists or check if
1278
+ the DAG to wait for exists. (Default: True)
1279
+ """
1280
+ ...
1281
+
1271
1282
  @typing.overload
1272
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1283
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1273
1284
  """
1274
- Specifies the times when the flow should be run when running on a
1275
- production scheduler.
1285
+ Specifies the PyPI packages for all steps of the flow.
1276
1286
 
1287
+ Use `@pypi_base` to set common packages required by all
1288
+ steps and use `@pypi` to specify step-specific overrides.
1277
1289
 
1278
1290
  Parameters
1279
1291
  ----------
1280
- hourly : bool, default False
1281
- Run the workflow hourly.
1282
- daily : bool, default True
1283
- Run the workflow daily.
1284
- weekly : bool, default False
1285
- Run the workflow weekly.
1286
- cron : str, optional, default None
1287
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1288
- specified by this expression.
1289
- timezone : str, optional, default None
1290
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1291
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1292
+ packages : Dict[str, str], default: {}
1293
+ Packages to use for this flow. The key is the name of the package
1294
+ and the value is the version to use.
1295
+ python : str, optional, default: None
1296
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1297
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1292
1298
  """
1293
1299
  ...
1294
1300
 
1295
1301
  @typing.overload
1296
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1302
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1297
1303
  ...
1298
1304
 
1299
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1305
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1300
1306
  """
1301
- Specifies the times when the flow should be run when running on a
1302
- production scheduler.
1307
+ Specifies the PyPI packages for all steps of the flow.
1303
1308
 
1309
+ Use `@pypi_base` to set common packages required by all
1310
+ steps and use `@pypi` to specify step-specific overrides.
1304
1311
 
1305
1312
  Parameters
1306
1313
  ----------
1307
- hourly : bool, default False
1308
- Run the workflow hourly.
1309
- daily : bool, default True
1310
- Run the workflow daily.
1311
- weekly : bool, default False
1312
- Run the workflow weekly.
1313
- cron : str, optional, default None
1314
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1315
- specified by this expression.
1316
- timezone : str, optional, default None
1317
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1318
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1314
+ packages : Dict[str, str], default: {}
1315
+ Packages to use for this flow. The key is the name of the package
1316
+ and the value is the version to use.
1317
+ python : str, optional, default: None
1318
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1319
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1319
1320
  """
1320
1321
  ...
1321
1322