metaflow-stubs 2.13__py2.py3-none-any.whl → 2.13.2__py2.py3-none-any.whl

This diff compares the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (144)
  1. metaflow-stubs/__init__.pyi +218 -218
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +5 -5
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +35 -35
  21. metaflow-stubs/multicore_utils.pyi +2 -2
  22. metaflow-stubs/parameters.pyi +5 -5
  23. metaflow-stubs/plugins/__init__.pyi +21 -13
  24. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -8
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -4
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  39. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +9 -10
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -5
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
  57. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  61. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  62. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  63. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  64. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  73. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  77. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  79. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  80. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  84. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  86. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  87. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  88. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  94. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +25 -2
  100. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +5 -22
  101. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +12 -3
  103. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +3 -7
  104. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +10 -4
  105. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  106. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  108. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  110. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/utils.pyi +6 -2
  113. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  116. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  117. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  119. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  120. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  121. metaflow-stubs/pylint_wrapper.pyi +2 -2
  122. metaflow-stubs/runner/__init__.pyi +2 -2
  123. metaflow-stubs/runner/deployer.pyi +30 -30
  124. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  125. metaflow-stubs/runner/metaflow_runner.pyi +15 -7
  126. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  127. metaflow-stubs/runner/nbrun.pyi +2 -2
  128. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  129. metaflow-stubs/runner/utils.pyi +3 -3
  130. metaflow-stubs/system/__init__.pyi +2 -2
  131. metaflow-stubs/system/system_logger.pyi +3 -3
  132. metaflow-stubs/system/system_monitor.pyi +2 -2
  133. metaflow-stubs/tagging_util.pyi +2 -2
  134. metaflow-stubs/tuple_util.pyi +2 -2
  135. metaflow-stubs/user_configs/__init__.pyi +2 -2
  136. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  137. metaflow-stubs/user_configs/config_options.pyi +4 -4
  138. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  139. metaflow-stubs/version.pyi +2 -2
  140. {metaflow_stubs-2.13.dist-info → metaflow_stubs-2.13.2.dist-info}/METADATA +2 -2
  141. metaflow_stubs-2.13.2.dist-info/RECORD +144 -0
  142. {metaflow_stubs-2.13.dist-info → metaflow_stubs-2.13.2.dist-info}/WHEEL +1 -1
  143. metaflow_stubs-2.13.dist-info/RECORD +0 -144
  144. {metaflow_stubs-2.13.dist-info → metaflow_stubs-2.13.2.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13 #
- # Generated on 2024-12-20T07:38:30.145823 #
+ # MF version: 2.13.2 #
+ # Generated on 2025-01-08T14:49:03.615477 #
  ######################################################################################################

  from __future__ import annotations
@@ -35,8 +35,8 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import tuple_util as tuple_util
  from . import events as events
+ from . import tuple_util as tuple_util
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
@@ -143,114 +143,176 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
  """
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies that the step will success under all circumstances.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies the PyPI packages for the step.
+ Specifies that the step will success under all circumstances.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+ """
+ ...
+
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+ """
+ ...
+
+ @typing.overload
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ """
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

@@ -294,186 +356,173 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ Specifies the PyPI packages for the step.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ Specifies the PyPI packages for the step.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
- """
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies that this step should execute on Kubernetes.
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies a timeout for your step.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the Conda environment for the step.
+ Specifies a timeout for your step.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

- @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
  """
  ...

- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies what flows belong to the same project.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies the PyPI packages for all steps of the flow.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ """
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
  """
  ...

@@ -550,6 +599,25 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+ """
+ ...
+
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -631,25 +699,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
- """
- ...
-
  @typing.overload
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -673,52 +722,3 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...

- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
- """
- ...
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
- """
- ...
-
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
- """
- ...
-