metaflow-stubs 2.16.3__py2.py3-none-any.whl → 2.16.4__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Note: the registry has flagged this release of metaflow-stubs as potentially problematic.

Files changed (166)
  1. metaflow-stubs/__init__.pyi +702 -702
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +36 -36
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +6 -6
  24. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  29. metaflow-stubs/parameters.pyi +3 -3
  30. metaflow-stubs/plugins/__init__.pyi +15 -15
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  135. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +5 -5
  141. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  142. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  146. metaflow-stubs/runner/utils.pyi +4 -4
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +3 -3
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -2
  153. metaflow-stubs/user_configs/config_options.pyi +3 -3
  154. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  155. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  156. metaflow-stubs/user_decorators/common.pyi +2 -2
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  158. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.16.3.dist-info → metaflow_stubs-2.16.4.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.16.4.dist-info/RECORD +166 -0
  164. metaflow_stubs-2.16.3.dist-info/RECORD +0 -166
  165. {metaflow_stubs-2.16.3.dist-info → metaflow_stubs-2.16.4.dist-info}/WHEEL +0 -0
  166. {metaflow_stubs-2.16.3.dist-info → metaflow_stubs-2.16.4.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.16.3 #
- # Generated on 2025-07-16T15:23:11.248549 #
+ # MF version: 2.16.4 #
+ # Generated on 2025-07-17T18:30:30.670730 #
  ######################################################################################################

  from __future__ import annotations
@@ -39,8 +39,8 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
- from . import tuple_util as tuple_util
  from . import metaflow_git as metaflow_git
+ from . import tuple_util as tuple_util
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
@@ -48,8 +48,8 @@ from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
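The two hunks above only reorder top-level re-exports; nothing is added or removed. For orientation, the reordered pypi parsers are typically handed to a `Config` so that a dependency file drives the flow's package settings. A minimal sketch, assuming a local `requirements.txt`; the flow name and config name are illustrative, not part of this diff:

```
from metaflow import Config, FlowSpec, step
from metaflow import requirements_txt_parser  # re-exported at the top level, as shown above

class DepsFlow(FlowSpec):
    # Parse requirements.txt into a structured config when the flow is launched.
    deps = Config("deps", default="requirements.txt", parser=requirements_txt_parser)

    @step
    def start(self):
        print(self.deps)  # the parsed package specification
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DepsFlow()
```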
@@ -152,6 +152,155 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
  @typing.overload
  def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -185,6 +334,100 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  """
  ...

+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
+ """
+ ...
+
  @typing.overload
  def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -234,10 +477,9 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...

- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies that this step should execute on Kubernetes.


  Parameters
@@ -245,14 +487,222 @@ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optio
  cpu : int, default 1
  Number of CPUs required for this step. If `@resources` is
  also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
  memory : int, default 4096
  Memory size (in MB) required for this step. If
  `@resources` is also present, the maximum value from all decorators is
  used.
- image : str, optional, default None
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ image_pull_secrets: List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+ Kubernetes image pull secrets to use when pulling container images
+ in Kubernetes.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
+ """
+ ...
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
+ @typing.overload
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
  Docker image to use when launching on AWS Batch. If not specified, and
  METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
  not, a default Docker image mapping to the current version of Python is used.
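The next hunk continues the `@batch` parameter list. Before it, a usage sketch combining the `@kubernetes`, `@conda`, and `@timeout` decorators documented above (the `@kubernetes` and `@conda` blocks also reappear as removals further down, since the stub was regenerated in a different order; the image defaults and package pin here are illustrative):

```
from metaflow import FlowSpec, conda, kubernetes, step, timeout

class TrainFlow(FlowSpec):

    @timeout(hours=1)                            # a timeout surfaces as a step exception
    @conda(packages={"pandas": "2.2.2"})         # step-level override of @conda_base
    @kubernetes(cpu=2, memory=8192, disk=10240)  # disk default matches the stub
    @step
    def start(self):
        import pandas as pd  # resolved inside the step's conda environment
        self.frame_size = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TrainFlow()
```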
@@ -384,493 +834,43 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
384
834
  ...
385
835
 
386
836
  @typing.overload
387
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
837
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
388
838
  """
389
- Specifies the number of times the task corresponding
390
- to a step needs to be retried.
391
-
392
- This decorator is useful for handling transient errors, such as networking issues.
393
- If your task contains operations that can't be retried safely, e.g. database updates,
394
- it is advisable to annotate it with `@retry(times=0)`.
839
+ Specifies the PyPI packages for the step.
395
840
 
396
- This can be used in conjunction with the `@catch` decorator. The `@catch`
397
- decorator will execute a no-op task after all retries have been exhausted,
398
- ensuring that the flow execution can continue.
841
+ Information in this decorator will augment any
842
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
843
+ you can use `@pypi_base` to set packages required by all
844
+ steps and use `@pypi` to specify step-specific overrides.
399
845
 
400
846
 
401
847
  Parameters
402
848
  ----------
403
- times : int, default 3
404
- Number of times to retry this task.
405
- minutes_between_retries : int, default 2
406
- Number of minutes between retries.
849
+ packages : Dict[str, str], default: {}
850
+ Packages to use for this step. The key is the name of the package
851
+ and the value is the version to use.
852
+ python : str, optional, default: None
853
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
854
+ that the version used will correspond to the version of the Python interpreter used to start the run.
407
855
  """
408
856
  ...
409
857
 
410
858
  @typing.overload
411
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
859
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
412
860
  ...
413
861
 
414
862
  @typing.overload
415
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
863
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
416
864
  ...
417
865
 
418
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
866
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
419
867
  """
420
- Specifies the number of times the task corresponding
421
- to a step needs to be retried.
868
+ Specifies the PyPI packages for the step.
422
869
 
423
- This decorator is useful for handling transient errors, such as networking issues.
424
- If your task contains operations that can't be retried safely, e.g. database updates,
425
- it is advisable to annotate it with `@retry(times=0)`.
426
-
427
- This can be used in conjunction with the `@catch` decorator. The `@catch`
428
- decorator will execute a no-op task after all retries have been exhausted,
429
- ensuring that the flow execution can continue.
430
-
431
-
432
- Parameters
433
- ----------
434
- times : int, default 3
435
- Number of times to retry this task.
436
- minutes_between_retries : int, default 2
437
- Number of minutes between retries.
438
- """
439
- ...
440
-
441
- @typing.overload
442
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
443
- """
444
- Specifies the Conda environment for the step.
445
-
446
- Information in this decorator will augment any
447
- attributes set in the `@conda_base` flow-level decorator. Hence,
448
- you can use `@conda_base` to set packages required by all
449
- steps and use `@conda` to specify step-specific overrides.
450
-
451
-
452
- Parameters
453
- ----------
454
- packages : Dict[str, str], default {}
455
- Packages to use for this step. The key is the name of the package
456
- and the value is the version to use.
457
- libraries : Dict[str, str], default {}
458
- Supported for backward compatibility. When used with packages, packages will take precedence.
459
- python : str, optional, default None
460
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
461
- that the version used will correspond to the version of the Python interpreter used to start the run.
462
- disabled : bool, default False
463
- If set to True, disables @conda.
464
- """
465
- ...
466
-
467
- @typing.overload
468
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
469
- ...
470
-
471
- @typing.overload
472
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
473
- ...
474
-
475
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
476
- """
477
- Specifies the Conda environment for the step.
478
-
479
- Information in this decorator will augment any
480
- attributes set in the `@conda_base` flow-level decorator. Hence,
481
- you can use `@conda_base` to set packages required by all
482
- steps and use `@conda` to specify step-specific overrides.
483
-
484
-
485
- Parameters
486
- ----------
487
- packages : Dict[str, str], default {}
488
- Packages to use for this step. The key is the name of the package
489
- and the value is the version to use.
490
- libraries : Dict[str, str], default {}
491
- Supported for backward compatibility. When used with packages, packages will take precedence.
492
- python : str, optional, default None
493
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
494
- that the version used will correspond to the version of the Python interpreter used to start the run.
495
- disabled : bool, default False
496
- If set to True, disables @conda.
497
- """
498
- ...
499
-
500
- @typing.overload
501
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
502
- """
503
- Specifies the resources needed when executing this step.
504
-
505
- Use `@resources` to specify the resource requirements
506
- independently of the specific compute layer (`@batch`, `@kubernetes`).
507
-
508
- You can choose the compute layer on the command line by executing e.g.
509
- ```
510
- python myflow.py run --with batch
511
- ```
512
- or
513
- ```
514
- python myflow.py run --with kubernetes
515
- ```
516
- which executes the flow on the desired system using the
517
- requirements specified in `@resources`.
518
-
519
-
520
- Parameters
521
- ----------
522
- cpu : int, default 1
523
- Number of CPUs required for this step.
524
- gpu : int, optional, default None
525
- Number of GPUs required for this step.
526
- disk : int, optional, default None
527
- Disk size (in MB) required for this step. Only applies on Kubernetes.
528
- memory : int, default 4096
529
- Memory size (in MB) required for this step.
530
- shared_memory : int, optional, default None
531
- The value for the size (in MiB) of the /dev/shm volume for this step.
532
- This parameter maps to the `--shm-size` option in Docker.
533
- """
534
- ...
535
-
536
- @typing.overload
537
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
538
- ...
539
-
540
- @typing.overload
541
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
542
- ...
543
-
544
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
545
- """
546
- Specifies the resources needed when executing this step.
547
-
548
- Use `@resources` to specify the resource requirements
549
- independently of the specific compute layer (`@batch`, `@kubernetes`).
550
-
551
- You can choose the compute layer on the command line by executing e.g.
552
- ```
553
- python myflow.py run --with batch
554
- ```
555
- or
556
- ```
557
- python myflow.py run --with kubernetes
558
- ```
559
- which executes the flow on the desired system using the
560
- requirements specified in `@resources`.
561
-
562
-
563
- Parameters
564
- ----------
565
- cpu : int, default 1
566
- Number of CPUs required for this step.
567
- gpu : int, optional, default None
568
- Number of GPUs required for this step.
569
- disk : int, optional, default None
570
- Disk size (in MB) required for this step. Only applies on Kubernetes.
571
- memory : int, default 4096
572
- Memory size (in MB) required for this step.
573
- shared_memory : int, optional, default None
574
- The value for the size (in MiB) of the /dev/shm volume for this step.
575
- This parameter maps to the `--shm-size` option in Docker.
576
- """
577
- ...
578
-
579
- @typing.overload
580
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
581
- """
582
- Specifies that the step will success under all circumstances.
583
-
584
- The decorator will create an optional artifact, specified by `var`, which
585
- contains the exception raised. You can use it to detect the presence
586
- of errors, indicating that all happy-path artifacts produced by the step
587
- are missing.
588
-
589
-
590
- Parameters
591
- ----------
592
- var : str, optional, default None
593
- Name of the artifact in which to store the caught exception.
594
- If not specified, the exception is not stored.
595
- print_exception : bool, default True
596
- Determines whether or not the exception is printed to
597
- stdout when caught.
598
- """
599
- ...
600
-
601
- @typing.overload
602
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
603
- ...
604
-
605
- @typing.overload
606
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
607
- ...
608
-
609
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
610
- """
611
- Specifies that the step will success under all circumstances.
612
-
613
- The decorator will create an optional artifact, specified by `var`, which
614
- contains the exception raised. You can use it to detect the presence
615
- of errors, indicating that all happy-path artifacts produced by the step
616
- are missing.
617
-
618
-
619
- Parameters
620
- ----------
621
- var : str, optional, default None
622
- Name of the artifact in which to store the caught exception.
623
- If not specified, the exception is not stored.
624
- print_exception : bool, default True
625
- Determines whether or not the exception is printed to
626
- stdout when caught.
627
- """
628
- ...
629
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
-
- Parameters
- ----------
- cpu : int, default 1
-     Number of CPUs required for this step. If `@resources` is
-     also present, the maximum value from all decorators is used.
- memory : int, default 4096
-     Memory size (in MB) required for this step. If
-     `@resources` is also present, the maximum value from all decorators is
-     used.
- disk : int, default 10240
-     Disk size (in MB) required for this step. If
-     `@resources` is also present, the maximum value from all decorators is
-     used.
- image : str, optional, default None
-     Docker image to use when launching on Kubernetes. If not specified, and
-     METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
-     not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy : str, default KUBERNETES_IMAGE_PULL_POLICY
-     If given, the imagePullPolicy to be applied to the Docker image of the step.
- image_pull_secrets : List[str], default []
-     The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
-     Kubernetes image pull secrets to use when pulling container images
-     in Kubernetes.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
-     Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
-     Kubernetes secrets to use when launching pod in Kubernetes. These
-     secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
-     in Metaflow configuration.
- node_selector : Union[Dict[str, str], str], optional, default None
-     Kubernetes node selector(s) to apply to the pod running the task.
-     Can be passed in as a comma-separated string of values e.g.
-     'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
-     {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
-     Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
-     Number of GPUs required for this step. A value of zero implies that
-     the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
-     The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
-     The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
-     Kubernetes tolerations to use when launching pod in Kubernetes.
- labels : Dict[str, str], default METAFLOW_KUBERNETES_LABELS
-     Kubernetes labels to use when launching pod in Kubernetes.
- annotations : Dict[str, str], default METAFLOW_KUBERNETES_ANNOTATIONS
-     Kubernetes annotations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
-     This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
-     Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
-     The value for the size (in MiB) of the tmpfs mount for this step.
-     This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-     memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
-     Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
-     A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
-     volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory : int, optional
-     Shared memory size (in MiB) required for this step.
- port : int, optional
-     Port number to specify in the Kubernetes job object.
- compute_pool : str, optional, default None
-     Compute pool to be used for this step.
-     If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout : int, default 10 * 60
-     Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
-     Only applicable when @parallel is used.
- qos : str, default Burstable
-     Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort.
-
- security_context : Dict[str, Any], optional, default None
-     Container security context. Applies to the task container. Allows the following keys:
-     - privileged: bool, optional, default None
-     - allow_privilege_escalation: bool, optional, default None
-     - run_as_user: int, optional, default None
-     - run_as_group: int, optional, default None
-     - run_as_non_root: bool, optional, default None
- """
- ...
-
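As a usage sketch (resource numbers are illustrative, and a configured Kubernetes backend is assumed), the decorator is applied per step:

```python
from metaflow import FlowSpec, kubernetes, step

class K8sDemoFlow(FlowSpec):
    # Ask the scheduler for 2 CPUs, 8 GB RAM and 20 GB disk; image,
    # namespace and service account fall back to METAFLOW_KUBERNETES_* config.
    @kubernetes(cpu=2, memory=8192, disk=20480)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sDemoFlow()
```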
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
-     List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
-     Role to use for fetching secrets
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
-     List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
-     Role to use for fetching secrets
- """
- ...
-
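A hedged sketch of `@secrets`: the secret id below is hypothetical, and the keys it exposes (here `MY_PASSWORD`) depend on what the configured secrets backend stores:

```python
import os
from metaflow import FlowSpec, secrets, step

class SecretsDemoFlow(FlowSpec):
    # Before `start` runs, each key of the named secret is injected as an
    # environment variable (backend-dependent; secret id is hypothetical).
    @secrets(sources=["metaflow-example-password"])
    @step
    def start(self):
        print("MY_PASSWORD present:", "MY_PASSWORD" in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsDemoFlow()
```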
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorator types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorator types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together, so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
- Parameters
- ----------
- seconds : int, default 0
-     Number of seconds to wait prior to timing out.
- minutes : int, default 0
-     Number of minutes to wait prior to timing out.
- hours : int, default 0
-     Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together, so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
- Parameters
- ----------
- seconds : int, default 0
-     Number of seconds to wait prior to timing out.
- minutes : int, default 0
-     Number of minutes to wait prior to timing out.
- hours : int, default 0
-     Number of hours to wait prior to timing out.
- """
- ...
-
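Since the duration arguments are summed, a sketch combining `@timeout` with `@retry` and `@catch` (values illustrative):

```python
from metaflow import FlowSpec, catch, retry, step, timeout

class TimeoutDemoFlow(FlowSpec):
    # Effective limit is 1 h + 30 min = 90 minutes. On timeout, the step is
    # retried once; if it times out again, @catch records the failure.
    @catch(var="timed_out")
    @retry(times=1)
    @timeout(hours=1, minutes=30)
    @step
    def start(self):
        import time
        time.sleep(5)  # stand-in for potentially hanging work
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TimeoutDemoFlow()
```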
- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pypi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
-     Packages to use for this step. The key is the name of the package
-     and the value is the version to use.
- python : str, optional, default: None
-     Version of Python to use, e.g. '3.7.4'. A default value of None implies
-     that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pypi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Information in this decorator will augment any
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 

 

  Parameters
@@ -884,79 +884,46 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...
 
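A minimal sketch of the step-level decorator; the pinned versions are illustrative assumptions:

```python
from metaflow import FlowSpec, pypi, step

class PypiDemoFlow(FlowSpec):
    # scikit-learn is installed into an isolated environment for this step
    # only; other steps run without it.
    @pypi(python="3.11.0", packages={"scikit-learn": "1.5.0"})
    @step
    def start(self):
        import sklearn
        print("sklearn", sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiDemoFlow()
```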
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
 

  Parameters
  ----------
+ timeout : int
+     Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+     Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+     How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+     Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+     The slot pool this task should run in;
+     slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+     Set to True to mark the task as SKIPPED on failure. (Default: False)
  name : str
-     Project name. Make sure that the name is unique amongst all
-     projects that use the same production scheduler. The name may
-     contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
-     The branch to use. If not specified, the branch is set to
-     `user.<username>` unless `production` is set to `True`. This can
-     also be set on the command line using `--branch` as a top-level option.
-     It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
-     Whether or not the branch is the production branch. This can also be set on the
-     command line using `--production` as a top-level option. It is an error to specify
-     `production` in the decorator and on the command line.
-     The project branch name will be:
-     - if `branch` is specified:
-         - if `production` is True: `prod.<branch>`
-         - if `production` is False: `test.<branch>`
-     - if `branch` is not specified:
-         - if `production` is True: `prod`
-         - if `production` is False: `user.<username>`
- """
- ...
-
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
-     Packages to use for this flow. The key is the name of the package
-     and the value is the version to use.
- python : str, optional, default: None
-     Version of Python to use, e.g. '3.7.4'. A default value of None implies
-     that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
-     Packages to use for this flow. The key is the name of the package
-     and the value is the version to use.
- python : str, optional, default: None
-     Version of Python to use, e.g. '3.7.4'. A default value of None implies
-     that the version used will correspond to the version of the Python interpreter used to start the run.
+     Name of the sensor on Airflow
+ description : str
+     Description of sensor in the Airflow UI
+ external_dag_id : str
+     The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+     The list of task_ids that you want to wait for.
+     If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+     Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+     Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+     Time difference with the previous execution to look at;
+     the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+     Set to True to check if the external task exists or check if
+     the DAG to wait for exists. (Default: True)
  """
  ...
 
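A sketch of the flow-level sensor; the stub lists every argument, but the runtime decorator supplies the documented defaults, so only the (hypothetical) DAG and task ids are given here. It takes effect once the flow is compiled with `airflow create`:

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step

# Block `start` until task `publish_table` of the hypothetical Airflow
# DAG `upstream_etl` has succeeded.
@airflow_external_task_sensor(
    name="wait_for_upstream_etl",
    external_dag_id="upstream_etl",
    external_task_ids=["publish_table"],
)
class SensorGatedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()
```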
@@ -1012,138 +979,154 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  ...
 
  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.
 
  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
-           {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```
 
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
 

  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
-     Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
-     Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+     Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+     Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
      Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.
 
  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
-           {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```
 
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
+
+ Parameters
+ ----------
+ flow : Union[str, Dict[str, str]], optional, default None
+     Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+     Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+     Backend-specific configuration for tuning eventing behavior.
+ """
+ ...
+
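A sketch of flow-to-flow chaining; `FooFlow` is a hypothetical upstream flow, and the wiring only takes effect once both flows are deployed to an event-capable orchestrator such as Argo Workflows:

```python
from metaflow import FlowSpec, step, trigger_on_finish

# Run automatically whenever a FooFlow run in the same namespace succeeds.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```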
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
 

  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
-     Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
-     Events dependency for this flow.
- options : Dict[str, Any], default {}
-     Backend-specific configuration for tuning eventing behavior.
+ hourly : bool, default False
+     Run the workflow hourly.
+ daily : bool, default True
+     Run the workflow daily.
+ weekly : bool, default False
+     Run the workflow weekly.
+ cron : str, optional, default None
+     Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+     specified by this expression.
+ timezone : str, optional, default None
+     Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+     which accept timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
 
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
 

  Parameters
  ----------
- timeout : int
-     Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
-     Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
-     How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
-     Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
-     The slot pool this task should run in;
-     slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
-     Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
-     Name of the sensor on Airflow
- description : str
-     Description of sensor in the Airflow UI
- external_dag_id : str
-     The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
-     The list of task_ids that you want to wait for.
-     If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
-     Iterable of allowed states. (Default: ['success'])
- failed_states : List[str]
-     Iterable of failed or disallowed states. (Default: None)
- execution_delta : datetime.timedelta
-     Time difference with the previous execution to look at;
-     the default is the same logical date as the current task or DAG. (Default: None)
- check_existence : bool
-     Set to True to check if the external task exists or check if
-     the DAG to wait for exists. (Default: True)
+ hourly : bool, default False
+     Run the workflow hourly.
+ daily : bool, default True
+     Run the workflow daily.
+ weekly : bool, default False
+     Run the workflow weekly.
+ cron : str, optional, default None
+     Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+     specified by this expression.
+ timezone : str, optional, default None
+     Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+     which accept timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
 
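A sketch of `@schedule` with a cron expression (the exact field format depends on the production scheduler; the EventBridge-style six-field form from the link above is shown):

```python
from metaflow import FlowSpec, schedule, step

# Deployed to a production scheduler, this flow runs daily at 02:00;
# when run locally, the decorator has no effect.
@schedule(cron="0 2 * * ? *")
class NightlyFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```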
@@ -1191,152 +1174,169 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  ...
 
  @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- hourly : bool, default False
-     Run the workflow hourly.
- daily : bool, default True
-     Run the workflow daily.
- weekly : bool, default False
-     Run the workflow weekly.
- cron : str, optional, default None
-     Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-     specified by this expression.
- timezone : str, optional, default None
-     Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-     which accept timezones in [IANA format](https://nodatime.org/TimeZones).
+ packages : Dict[str, str], default: {}
+     Packages to use for this flow. The key is the name of the package
+     and the value is the version to use.
+ python : str, optional, default: None
+     Version of Python to use, e.g. '3.7.4'. A default value of None implies
+     that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
 
  @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- hourly : bool, default False
-     Run the workflow hourly.
- daily : bool, default True
-     Run the workflow daily.
- weekly : bool, default False
-     Run the workflow weekly.
- cron : str, optional, default None
-     Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-     specified by this expression.
- timezone : str, optional, default None
-     Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-     which accept timezones in [IANA format](https://nodatime.org/TimeZones).
+ packages : Dict[str, str], default: {}
+     Packages to use for this flow. The key is the name of the package
+     and the value is the version to use.
+ python : str, optional, default: None
+     Version of Python to use, e.g. '3.7.4'. A default value of None implies
+     that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
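The flow-level counterpart in a sketch (versions are illustrative); `@pypi` on an individual step can still override or extend these packages:

```python
from metaflow import FlowSpec, pypi_base, step

# Every step of this flow gets pandas in its isolated environment.
@pypi_base(python="3.11.0", packages={"pandas": "2.2.2"})
class PandasFlow(FlowSpec):
    @step
    def start(self):
        import pandas as pd
        self.shape = pd.DataFrame({"x": [1, 2, 3]}).shape
        self.next(self.end)

    @step
    def end(self):
        print("dataframe shape:", self.shape)

if __name__ == "__main__":
    PandasFlow()
```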
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+
+ Parameters
+ ----------
+ name : str
+     Project name. Make sure that the name is unique amongst all
+     projects that use the same production scheduler. The name may
+     contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+     The branch to use. If not specified, the branch is set to
+     `user.<username>` unless `production` is set to `True`. This can
+     also be set on the command line using `--branch` as a top-level option.
+     It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+     Whether or not the branch is the production branch. This can also be set on the
+     command line using `--production` as a top-level option. It is an error to specify
+     `production` in the decorator and on the command line.
+     The project branch name will be:
+     - if `branch` is specified:
+         - if `production` is True: `prod.<branch>`
+         - if `production` is False: `test.<branch>`
+     - if `branch` is not specified:
+         - if `production` is True: `prod`
+         - if `production` is False: `user.<username>`
  """
  ...
 
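A sketch of the branch rules above in practice (project name illustrative): deploying as-is yields the `user.<username>` branch, while `--production` yields `prod`:

```python
from metaflow import FlowSpec, project, step

# All flows decorated with the same project name share one namespace on
# the production scheduler; the branch is derived as documented above.
@project(name="fraud_detection")
class TrainingFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TrainingFlow()
```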
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+           {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
-     Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
-     Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+     Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+     Events dependency for this flow.
  options : Dict[str, Any], default {}
      Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+           {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
-     Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
-     Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+     Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+     Events dependency for this flow.
  options : Dict[str, Any], default {}
      Backend-specific configuration for tuning eventing behavior.
  """