metaflow-stubs 2.12.36__py2.py3-none-any.whl → 2.12.37__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (162)
  1. metaflow-stubs/__init__.pyi +193 -189
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +9 -21
  4. metaflow-stubs/{plugins/cards/card_modules/chevron/metadata.pyi → cli_components/__init__.pyi} +3 -4
  5. metaflow-stubs/cli_components/utils.pyi +35 -0
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +10 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +19 -12
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +5 -5
  21. metaflow-stubs/multicore_utils.pyi +2 -2
  22. metaflow-stubs/parameters.pyi +34 -17
  23. metaflow-stubs/plugins/__init__.pyi +13 -13
  24. metaflow-stubs/plugins/airflow/__init__.pyi +2 -4
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -5
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -3
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  39. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -3
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +5 -5
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -7
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +3 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -5
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  57. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +5 -5
  61. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  62. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  63. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  64. metaflow-stubs/plugins/cards/__init__.pyi +2 -3
  65. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -3
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  73. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  77. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  79. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  80. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  84. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  86. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  87. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  88. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +5 -5
  94. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -3
  99. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  101. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  103. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  105. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  106. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  108. metaflow-stubs/plugins/pypi/conda_decorator.pyi +6 -2
  109. metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
  110. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  113. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  116. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  117. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  119. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  120. metaflow-stubs/plugins/timeout_decorator.pyi +4 -4
  121. metaflow-stubs/pylint_wrapper.pyi +2 -2
  122. metaflow-stubs/runner/__init__.pyi +2 -2
  123. metaflow-stubs/runner/deployer.pyi +7 -7
  124. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  125. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  126. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  127. metaflow-stubs/runner/nbrun.pyi +2 -2
  128. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  129. metaflow-stubs/runner/utils.pyi +3 -3
  130. metaflow-stubs/system/__init__.pyi +2 -2
  131. metaflow-stubs/system/system_logger.pyi +2 -2
  132. metaflow-stubs/system/system_monitor.pyi +2 -2
  133. metaflow-stubs/tagging_util.pyi +2 -2
  134. metaflow-stubs/tuple_util.pyi +2 -2
  135. metaflow-stubs/{plugins/package_cli.pyi → user_configs/__init__.pyi} +5 -3
  136. metaflow-stubs/user_configs/config_decorators.pyi +253 -0
  137. metaflow-stubs/user_configs/config_options.pyi +82 -0
  138. metaflow-stubs/user_configs/config_parameters.pyi +217 -0
  139. metaflow-stubs/version.pyi +2 -2
  140. {metaflow_stubs-2.12.36.dist-info → metaflow_stubs-2.12.37.dist-info}/METADATA +2 -2
  141. metaflow_stubs-2.12.37.dist-info/RECORD +144 -0
  142. metaflow-stubs/clone_util.pyi +0 -14
  143. metaflow-stubs/mflog/__init__.pyi +0 -6
  144. metaflow-stubs/mflog/mflog.pyi +0 -69
  145. metaflow-stubs/plugins/airflow/airflow.pyi +0 -89
  146. metaflow-stubs/plugins/airflow/airflow_cli.pyi +0 -37
  147. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +0 -103
  148. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +0 -25
  149. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +0 -19
  150. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +0 -69
  151. metaflow-stubs/plugins/cards/card_cli.pyi +0 -146
  152. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +0 -17
  153. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +0 -22
  154. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +0 -77
  155. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +0 -76
  156. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +0 -33
  157. metaflow-stubs/plugins/logs_cli.pyi +0 -46
  158. metaflow-stubs/plugins/tag_cli.pyi +0 -19
  159. metaflow-stubs/procpoll.pyi +0 -53
  160. metaflow_stubs-2.12.36.dist-info/RECORD +0 -158
  161. {metaflow_stubs-2.12.36.dist-info → metaflow_stubs-2.12.37.dist-info}/WHEEL +0 -0
  162. {metaflow_stubs-2.12.36.dist-info → metaflow_stubs-2.12.37.dist-info}/top_level.txt +0 -0
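
The headline change in 2.12.37 is the new `metaflow-stubs/user_configs` package (items 135-138 above), whose stubs are re-exported at the top level as `Config`, `config_expr`, `CustomFlowDecorator`, and `CustomStepDecorator` (see the `__init__.pyi` hunks below). As orientation only, here is a minimal sketch of how those exports might be used; the constructor arguments and attribute-style access are assumptions inferred from the stub names, not something this diff confirms.

```python
# Hypothetical usage sketch of the user_configs exports added in 2.12.37.
# Assumption: Config behaves like a Parameter whose value is parsed from a
# JSON file before the flow starts; the exact signature is not shown here.
from metaflow import FlowSpec, step, Config


class ConfigDemoFlow(FlowSpec):
    # Assumed: a name plus a default path to a JSON config file
    cfg = Config("cfg", default="config.json")

    @step
    def start(self):
        # Assumed attribute-style access to keys of the parsed config
        print("learning rate:", self.cfg.learning_rate)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ConfigDemoFlow()
```

`config_expr` and the `CustomFlowDecorator`/`CustomStepDecorator` base classes appear alongside `Config` in the new stubs; their call signatures are defined in `user_configs/config_parameters.pyi` and `user_configs/config_decorators.pyi` rather than in this excerpt.
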
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.36 #
- # Generated on 2024-12-07T00:02:15.920105 #
+ # MF version: 2.12.37 #
+ # Generated on 2024-12-07T08:10:43.629344 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -22,6 +22,7 @@ from .multicore_utils import parallel_map as parallel_map
  from . import metaflow_current as metaflow_current
  from .metaflow_current import current as current
  from . import parameters as parameters
+ from . import user_configs as user_configs
  from . import tagging_util as tagging_util
  from . import metadata_provider as metadata_provider
  from . import flowspec as flowspec
@@ -29,8 +30,12 @@ from .flowspec import FlowSpec as FlowSpec
  from .parameters import Parameter as Parameter
  from .parameters import JSONTypeClass as JSONTypeClass
  from .parameters import JSONType as JSONType
- from . import events as events
+ from .user_configs.config_parameters import Config as Config
+ from .user_configs.config_parameters import config_expr as config_expr
+ from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
+ from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
  from . import tuple_util as tuple_util
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
@@ -56,10 +61,9 @@ from .runner.deployer import Deployer as Deployer
  from .runner.deployer import DeployedFlow as DeployedFlow
  from .runner.nbdeploy import NBDeployer as NBDeployer
  from . import version as version
+ from . import cli_components as cli_components
  from . import system as system
  from . import pylint_wrapper as pylint_wrapper
- from . import procpoll as procpoll
- from . import clone_util as clone_util
  from . import cli as cli

  EXT_PKG: str
@@ -137,338 +141,338 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
- """
- ...
-
  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the resources needed when executing this step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies the Conda environment for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies that the step will success under all circumstances.
+ Specifies the Conda environment for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Specifies the PyPI packages for the step.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Specifies the PyPI packages for the step.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ """
+ ...
+
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
  """
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the resources needed when executing this step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the resources needed when executing this step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
  """
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies the Conda environment for the step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

@@ -586,25 +590,6 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
- """
- ...
-
  @typing.overload
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -678,6 +663,25 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+ """
+ ...
+
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
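
The reshuffled step-decorator stubs above keep their docstrings, which spell out how `@retry`, `@catch`, and `@timeout` compose: a timeout is raised as an exception, `@retry` handles transient failures, and `@catch` runs a no-op task once retries are exhausted so the flow can continue. For reference, here is a minimal sketch of that combination; the flow, step, and artifact names (and the simulated flaky call) are illustrative, not taken from this package.

```python
# Illustrative sketch of combining the step decorators documented above.
import random

from metaflow import FlowSpec, catch, retry, step, timeout


def flaky_download():
    # Stand-in for a network call that fails transiently about half the time.
    if random.random() < 0.5:
        raise ConnectionError("transient network error")
    return {"status": "ok"}


class RobustFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.fetch)

    @catch(var="fetch_error")                    # keep the exception as an artifact instead of failing
    @retry(times=3, minutes_between_retries=2)   # retry transient errors a few times
    @timeout(minutes=10)                         # a hang becomes an exception that @retry/@catch handle
    @step
    def fetch(self):
        self.payload = flaky_download()
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "fetch_error", None):
            print("fetch failed after all retries:", self.fetch_error)


if __name__ == "__main__":
    RobustFlow()
```
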