metaflow-stubs 2.12.29__py2.py3-none-any.whl → 2.12.31__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (158)
  1. metaflow-stubs/__init__.pyi +241 -241
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +2 -2
  5. metaflow-stubs/client/core.pyi +4 -4
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +6 -6
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +3 -3
  13. metaflow-stubs/info_file.pyi +2 -2
  14. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  15. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  16. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  17. metaflow-stubs/metadata_provider/util.pyi +2 -2
  18. metaflow-stubs/metaflow_config.pyi +2 -2
  19. metaflow-stubs/metaflow_current.pyi +27 -27
  20. metaflow-stubs/mflog/__init__.pyi +2 -2
  21. metaflow-stubs/mflog/mflog.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/parameters.pyi +3 -3
  24. metaflow-stubs/plugins/__init__.pyi +12 -12
  25. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/airflow_cli.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  35. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  37. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  38. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  39. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  42. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  43. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  44. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  48. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  49. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  51. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  52. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  53. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -5
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  64. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  68. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  69. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  70. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  71. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_cli.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  85. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  86. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  87. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  88. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  89. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  90. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  91. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  92. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  95. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  97. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  98. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  99. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  100. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  101. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  102. metaflow-stubs/plugins/events_decorator.pyi +12 -2
  103. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  107. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  108. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  109. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  110. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  114. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  116. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  118. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  119. metaflow-stubs/plugins/logs_cli.pyi +2 -2
  120. metaflow-stubs/plugins/package_cli.pyi +2 -2
  121. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
  126. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  128. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  129. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  133. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  134. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  135. metaflow-stubs/plugins/tag_cli.pyi +2 -2
  136. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  137. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  138. metaflow-stubs/procpoll.pyi +2 -2
  139. metaflow-stubs/pylint_wrapper.pyi +2 -2
  140. metaflow-stubs/runner/__init__.pyi +2 -2
  141. metaflow-stubs/runner/deployer.pyi +5 -5
  142. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  143. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  144. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  145. metaflow-stubs/runner/nbrun.pyi +2 -2
  146. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  147. metaflow-stubs/runner/utils.pyi +3 -3
  148. metaflow-stubs/system/__init__.pyi +2 -2
  149. metaflow-stubs/system/system_logger.pyi +2 -2
  150. metaflow-stubs/system/system_monitor.pyi +2 -2
  151. metaflow-stubs/tagging_util.pyi +2 -2
  152. metaflow-stubs/tuple_util.pyi +2 -2
  153. metaflow-stubs/version.pyi +2 -2
  154. {metaflow_stubs-2.12.29.dist-info → metaflow_stubs-2.12.31.dist-info}/METADATA +2 -2
  155. metaflow_stubs-2.12.31.dist-info/RECORD +158 -0
  156. {metaflow_stubs-2.12.29.dist-info → metaflow_stubs-2.12.31.dist-info}/WHEEL +1 -1
  157. metaflow_stubs-2.12.29.dist-info/RECORD +0 -158
  158. {metaflow_stubs-2.12.29.dist-info → metaflow_stubs-2.12.31.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.29 #
- # Generated on 2024-11-07T22:19:34.657765 #
+ # MF version: 2.12.31 #
+ # Generated on 2024-11-22T20:12:01.558412 #
  ######################################################################################################

  from __future__ import annotations
@@ -169,73 +169,29 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies that this step should execute on Kubernetes.
  """
  ...

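In application code, the relocated `@batch` and `@kubernetes` stubs correspond to usage like the following minimal sketch; the flow name and resource values are illustrative, not part of the diff:

```
from metaflow import FlowSpec, step, batch

class HelloBatchFlow(FlowSpec):  # hypothetical flow, for illustration only
    # cpu/gpu/memory map to AWS Batch container resources; image, queue and
    # IAM roles fall back to Metaflow's configuration, per the stub defaults.
    @batch(cpu=2, memory=8192)
    @step
    def start(self):
        print("running inside an AWS Batch container")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    HelloBatchFlow()
```

Swapping `@batch(...)` for `@kubernetes(...)` targets a Kubernetes cluster instead, with the analogous `cpu`, `memory`, and `disk` parameters from the second relocated signature.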
@@ -271,136 +227,101 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
+ Specifies a timeout for your step.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful if this step may hang indefinitely.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the resources needed when executing this step.
+ Specifies a timeout for your step.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful if this step may hang indefinitely.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
- """
- ...
-
- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
- """
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

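The `@timeout`, `@retry`, and `@environment` docstrings in this hunk describe decorators that compose on a single step; a minimal sketch of that composition, with hypothetical flow and variable names:

```
from metaflow import FlowSpec, step, catch, retry, timeout, environment

class RobustFlow(FlowSpec):  # hypothetical flow, for illustration only
    # Per the docstrings above: timeout values are summed (here 1 hour + 1
    # minute), a timeout surfaces as an exception, @retry re-runs the task,
    # and @catch absorbs the failure once all retries are exhausted.
    @catch(var="flaky_error")
    @retry(times=3, minutes_between_retries=2)
    @timeout(hours=1, minutes=1)
    @environment(vars={"LOG_LEVEL": "debug"})  # set before the step executes
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    RobustFlow()
```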
@@ -427,166 +348,160 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
- """
- ...
-
  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
  """
  ...

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the resources needed when executing this step.

- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ You can choose the compute layer on the command line by executing e.g.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ python myflow.py run --with batch
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ python myflow.py run --with kubernetes
  ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
  """
  ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the resources needed when executing this step.

- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ You can choose the compute layer on the command line by executing e.g.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ python myflow.py run --with batch
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ python myflow.py run --with kubernetes
  ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+ """
+ ...
+
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+ """
  ...

- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
  """
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
  """
  ...

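The `@resources` and `@card` stubs moved by this hunk cover declarative step requirements and reporting; a minimal sketch with illustrative values (flow name and artifact are hypothetical):

```
from metaflow import FlowSpec, step, card, resources

class ReportFlow(FlowSpec):  # hypothetical flow, for illustration only
    # @resources declares requirements without choosing a compute layer;
    # @card renders a report for the task after the step completes.
    @card(type="default", timeout=45)
    @resources(cpu=4, memory=16384)
    @step
    def start(self):
        self.summary = "hello"  # artifacts are available to the card
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ReportFlow()
```

As the `@resources` docstring notes, the compute layer is chosen at run time, e.g. `python report_flow.py run --with batch` or `--with kubernetes`, and the declared requirements are applied on that layer.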
@@ -694,26 +609,111 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ """
+ ...
+
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+ """
+ ...
+
  @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+ """
+ ...
+
+ @typing.overload
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the event(s) that this flow depends on.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
  """
  ...

  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies the event(s) that this flow depends on.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
  """
  ...
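The flow-level decorators in this final hunk (`@project`, `@schedule`, `@trigger`) attach to the flow class rather than to a step; a minimal event-triggered sketch with hypothetical names, following the parameter-mapping form shown in the `@trigger` docstring:

```
from metaflow import FlowSpec, Parameter, step, project, trigger

@project(name="analytics")  # hypothetical project name
@trigger(event={"name": "data_updated",
                # maps the flow parameter to a field of the event payload
                "parameters": {"input_path": "path"}})
class EventDrivenFlow(FlowSpec):  # hypothetical flow, for illustration only
    input_path = Parameter("input_path", default="local-fallback")

    @step
    def start(self):
        print("triggered with", self.input_path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EventDrivenFlow()
```

For a time-based cadence instead, `@schedule(daily=True)` or a `cron` expression applies when the flow is deployed to a production scheduler, per the `@schedule` docstring above.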