metaflow-stubs 2.19.1__py2.py3-none-any.whl → 2.19.2__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of metaflow-stubs might be problematic. Click here for more details.

Files changed (168) hide show
  1. metaflow-stubs/__init__.pyi +499 -499
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +5 -5
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +22 -22
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  24. metaflow-stubs/packaging_sys/backend.pyi +5 -5
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +5 -5
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +4 -4
  29. metaflow-stubs/parameters.pyi +3 -3
  30. metaflow-stubs/plugins/__init__.pyi +10 -10
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  47. metaflow-stubs/plugins/aws/__init__.pyi +4 -4
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -5
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +3 -3
  83. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  85. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  86. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  87. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  88. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  94. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  95. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  96. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  97. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  102. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  105. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  108. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  111. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  116. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  117. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/parsers.pyi +2 -2
  119. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  121. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  123. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  126. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  127. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  130. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  131. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  133. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  134. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  135. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  136. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  137. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  138. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  139. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  140. metaflow-stubs/pylint_wrapper.pyi +2 -2
  141. metaflow-stubs/runner/__init__.pyi +2 -2
  142. metaflow-stubs/runner/deployer.pyi +31 -31
  143. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  144. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  145. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  146. metaflow-stubs/runner/nbrun.pyi +2 -2
  147. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  148. metaflow-stubs/runner/utils.pyi +3 -3
  149. metaflow-stubs/system/__init__.pyi +2 -2
  150. metaflow-stubs/system/system_logger.pyi +2 -2
  151. metaflow-stubs/system/system_monitor.pyi +2 -2
  152. metaflow-stubs/tagging_util.pyi +2 -2
  153. metaflow-stubs/tuple_util.pyi +2 -2
  154. metaflow-stubs/user_configs/__init__.pyi +2 -2
  155. metaflow-stubs/user_configs/config_options.pyi +3 -3
  156. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  157. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  158. metaflow-stubs/user_decorators/common.pyi +2 -2
  159. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  160. metaflow-stubs/user_decorators/mutable_step.pyi +3 -3
  161. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  162. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  163. metaflow-stubs/version.pyi +2 -2
  164. {metaflow_stubs-2.19.1.dist-info → metaflow_stubs-2.19.2.dist-info}/METADATA +2 -2
  165. metaflow_stubs-2.19.2.dist-info/RECORD +168 -0
  166. metaflow_stubs-2.19.1.dist-info/RECORD +0 -168
  167. {metaflow_stubs-2.19.1.dist-info → metaflow_stubs-2.19.2.dist-info}/WHEEL +0 -0
  168. {metaflow_stubs-2.19.1.dist-info → metaflow_stubs-2.19.2.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.19.1 #
4
- # Generated on 2025-10-28T01:39:06.721172 #
3
+ # MF version: 2.19.2 #
4
+ # Generated on 2025-10-28T11:13:58.765115 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import typing
12
11
  import datetime
12
+ import typing
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -48,9 +48,9 @@ from .plugins.datatools.s3.s3 import S3 as S3
48
48
  from . import includefile as includefile
49
49
  from .includefile import IncludeFile as IncludeFile
50
50
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
51
- from .plugins.parsers import yaml_parser as yaml_parser
52
51
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
53
52
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
53
+ from .plugins.parsers import yaml_parser as yaml_parser
54
54
  from . import cards as cards
55
55
  from . import client as client
56
56
  from .client.core import namespace as namespace
@@ -155,57 +155,35 @@ def step(f: typing.Callable[[~FlowSpecDerived], NoneType] | typing.Callable[[~Fl
155
155
  ...
156
156
 
157
157
  @typing.overload
158
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
158
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
159
159
  """
160
- Specifies the number of times the task corresponding
161
- to a step needs to be retried.
162
-
163
- This decorator is useful for handling transient errors, such as networking issues.
164
- If your task contains operations that can't be retried safely, e.g. database updates,
165
- it is advisable to annotate it with `@retry(times=0)`.
166
-
167
- This can be used in conjunction with the `@catch` decorator. The `@catch`
168
- decorator will execute a no-op task after all retries have been exhausted,
169
- ensuring that the flow execution can continue.
160
+ Specifies environment variables to be set prior to the execution of a step.
170
161
 
171
162
 
172
163
  Parameters
173
164
  ----------
174
- times : int, default 3
175
- Number of times to retry this task.
176
- minutes_between_retries : int, default 2
177
- Number of minutes between retries.
165
+ vars : Dict[str, str], default {}
166
+ Dictionary of environment variables to set.
178
167
  """
179
168
  ...
180
169
 
181
170
  @typing.overload
182
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
171
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
183
172
  ...
184
173
 
185
174
  @typing.overload
186
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
175
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
187
176
  ...
188
177
 
189
- def retry(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, times: int = 3, minutes_between_retries: int = 2):
178
+ def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
190
179
  """
191
- Specifies the number of times the task corresponding
192
- to a step needs to be retried.
193
-
194
- This decorator is useful for handling transient errors, such as networking issues.
195
- If your task contains operations that can't be retried safely, e.g. database updates,
196
- it is advisable to annotate it with `@retry(times=0)`.
197
-
198
- This can be used in conjunction with the `@catch` decorator. The `@catch`
199
- decorator will execute a no-op task after all retries have been exhausted,
200
- ensuring that the flow execution can continue.
180
+ Specifies environment variables to be set prior to the execution of a step.
201
181
 
202
182
 
203
183
  Parameters
204
184
  ----------
205
- times : int, default 3
206
- Number of times to retry this task.
207
- minutes_between_retries : int, default 2
208
- Number of minutes between retries.
185
+ vars : Dict[str, str], default {}
186
+ Dictionary of environment variables to set.
209
187
  """
210
188
  ...
211
189
 
@@ -289,208 +267,167 @@ def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_ge
289
267
  ...
290
268
 
291
269
  @typing.overload
292
- def pypi(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
270
+ def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
293
271
  """
294
- Specifies the PyPI packages for the step.
272
+ Specifies that the step will success under all circumstances.
295
273
 
296
- Information in this decorator will augment any
297
- attributes set in the `@pyi_base` flow-level decorator. Hence,
298
- you can use `@pypi_base` to set packages required by all
299
- steps and use `@pypi` to specify step-specific overrides.
274
+ The decorator will create an optional artifact, specified by `var`, which
275
+ contains the exception raised. You can use it to detect the presence
276
+ of errors, indicating that all happy-path artifacts produced by the step
277
+ are missing.
300
278
 
301
279
 
302
280
  Parameters
303
281
  ----------
304
- packages : Dict[str, str], default: {}
305
- Packages to use for this step. The key is the name of the package
306
- and the value is the version to use.
307
- python : str, optional, default: None
308
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
309
- that the version used will correspond to the version of the Python interpreter used to start the run.
282
+ var : str, optional, default None
283
+ Name of the artifact in which to store the caught exception.
284
+ If not specified, the exception is not stored.
285
+ print_exception : bool, default True
286
+ Determines whether or not the exception is printed to
287
+ stdout when caught.
310
288
  """
311
289
  ...
312
290
 
313
291
  @typing.overload
314
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
292
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
315
293
  ...
316
294
 
317
295
  @typing.overload
318
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
296
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
319
297
  ...
320
298
 
321
- def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
299
+ def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
322
300
  """
323
- Specifies the PyPI packages for the step.
301
+ Specifies that the step will success under all circumstances.
324
302
 
325
- Information in this decorator will augment any
326
- attributes set in the `@pyi_base` flow-level decorator. Hence,
327
- you can use `@pypi_base` to set packages required by all
328
- steps and use `@pypi` to specify step-specific overrides.
303
+ The decorator will create an optional artifact, specified by `var`, which
304
+ contains the exception raised. You can use it to detect the presence
305
+ of errors, indicating that all happy-path artifacts produced by the step
306
+ are missing.
329
307
 
330
308
 
331
309
  Parameters
332
310
  ----------
333
- packages : Dict[str, str], default: {}
334
- Packages to use for this step. The key is the name of the package
335
- and the value is the version to use.
336
- python : str, optional, default: None
337
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
338
- that the version used will correspond to the version of the Python interpreter used to start the run.
311
+ var : str, optional, default None
312
+ Name of the artifact in which to store the caught exception.
313
+ If not specified, the exception is not stored.
314
+ print_exception : bool, default True
315
+ Determines whether or not the exception is printed to
316
+ stdout when caught.
339
317
  """
340
318
  ...
341
319
 
342
320
  @typing.overload
343
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
321
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
344
322
  """
345
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
323
+ Specifies the number of times the task corresponding
324
+ to a step needs to be retried.
325
+
326
+ This decorator is useful for handling transient errors, such as networking issues.
327
+ If your task contains operations that can't be retried safely, e.g. database updates,
328
+ it is advisable to annotate it with `@retry(times=0)`.
329
+
330
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
331
+ decorator will execute a no-op task after all retries have been exhausted,
332
+ ensuring that the flow execution can continue.
346
333
 
347
334
 
348
335
  Parameters
349
336
  ----------
350
- cpu : int, default 1
351
- Number of CPUs required for this step. If `@resources` is
352
- also present, the maximum value from all decorators is used.
353
- gpu : int, default 0
354
- Number of GPUs required for this step. If `@resources` is
355
- also present, the maximum value from all decorators is used.
356
- memory : int, default 4096
357
- Memory size (in MB) required for this step. If
358
- `@resources` is also present, the maximum value from all decorators is
359
- used.
360
- image : str, optional, default None
361
- Docker image to use when launching on AWS Batch. If not specified, and
362
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
363
- not, a default Docker image mapping to the current version of Python is used.
364
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
365
- AWS Batch Job Queue to submit the job to.
366
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
367
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
368
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
369
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
370
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
371
- shared_memory : int, optional, default None
372
- The value for the size (in MiB) of the /dev/shm volume for this step.
373
- This parameter maps to the `--shm-size` option in Docker.
374
- max_swap : int, optional, default None
375
- The total amount of swap memory (in MiB) a container can use for this
376
- step. This parameter is translated to the `--memory-swap` option in
377
- Docker where the value is the sum of the container memory plus the
378
- `max_swap` value.
379
- swappiness : int, optional, default None
380
- This allows you to tune memory swappiness behavior for this step.
381
- A swappiness value of 0 causes swapping not to happen unless absolutely
382
- necessary. A swappiness value of 100 causes pages to be swapped very
383
- aggressively. Accepted values are whole numbers between 0 and 100.
384
- aws_batch_tags: Dict[str, str], optional, default None
385
- Sets arbitrary AWS tags on the AWS Batch compute environment.
386
- Set as string key-value pairs.
387
- use_tmpfs : bool, default False
388
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
389
- not available on Fargate compute environments
390
- tmpfs_tempdir : bool, default True
391
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
392
- tmpfs_size : int, optional, default None
393
- The value for the size (in MiB) of the tmpfs mount for this step.
394
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
395
- memory allocated for this step.
396
- tmpfs_path : str, optional, default None
397
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
398
- inferentia : int, default 0
399
- Number of Inferentia chips required for this step.
400
- trainium : int, default None
401
- Alias for inferentia. Use only one of the two.
402
- efa : int, default 0
403
- Number of elastic fabric adapter network devices to attach to container
404
- ephemeral_storage : int, default None
405
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
406
- This is only relevant for Fargate compute environments
407
- log_driver: str, optional, default None
408
- The log driver to use for the Amazon ECS container.
409
- log_options: List[str], optional, default None
410
- List of strings containing options for the chosen log driver. The configurable values
411
- depend on the `log driver` chosen. Validation of these options is not supported yet.
412
- Example: [`awslogs-group:aws/batch/job`]
337
+ times : int, default 3
338
+ Number of times to retry this task.
339
+ minutes_between_retries : int, default 2
340
+ Number of minutes between retries.
413
341
  """
414
342
  ...
415
343
 
416
344
  @typing.overload
417
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
345
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
418
346
  ...
419
347
 
420
348
  @typing.overload
421
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
349
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
422
350
  ...
423
351
 
424
- def batch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None):
352
+ def retry(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, times: int = 3, minutes_between_retries: int = 2):
425
353
  """
426
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
354
+ Specifies the number of times the task corresponding
355
+ to a step needs to be retried.
356
+
357
+ This decorator is useful for handling transient errors, such as networking issues.
358
+ If your task contains operations that can't be retried safely, e.g. database updates,
359
+ it is advisable to annotate it with `@retry(times=0)`.
360
+
361
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
362
+ decorator will execute a no-op task after all retries have been exhausted,
363
+ ensuring that the flow execution can continue.
427
364
 
428
365
 
429
366
  Parameters
430
367
  ----------
431
- cpu : int, default 1
432
- Number of CPUs required for this step. If `@resources` is
433
- also present, the maximum value from all decorators is used.
434
- gpu : int, default 0
435
- Number of GPUs required for this step. If `@resources` is
436
- also present, the maximum value from all decorators is used.
437
- memory : int, default 4096
438
- Memory size (in MB) required for this step. If
439
- `@resources` is also present, the maximum value from all decorators is
440
- used.
441
- image : str, optional, default None
442
- Docker image to use when launching on AWS Batch. If not specified, and
443
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
444
- not, a default Docker image mapping to the current version of Python is used.
445
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
446
- AWS Batch Job Queue to submit the job to.
447
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
448
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
449
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
450
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
451
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
452
- shared_memory : int, optional, default None
453
- The value for the size (in MiB) of the /dev/shm volume for this step.
454
- This parameter maps to the `--shm-size` option in Docker.
455
- max_swap : int, optional, default None
456
- The total amount of swap memory (in MiB) a container can use for this
457
- step. This parameter is translated to the `--memory-swap` option in
458
- Docker where the value is the sum of the container memory plus the
459
- `max_swap` value.
460
- swappiness : int, optional, default None
461
- This allows you to tune memory swappiness behavior for this step.
462
- A swappiness value of 0 causes swapping not to happen unless absolutely
463
- necessary. A swappiness value of 100 causes pages to be swapped very
464
- aggressively. Accepted values are whole numbers between 0 and 100.
465
- aws_batch_tags: Dict[str, str], optional, default None
466
- Sets arbitrary AWS tags on the AWS Batch compute environment.
467
- Set as string key-value pairs.
468
- use_tmpfs : bool, default False
469
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
470
- not available on Fargate compute environments
471
- tmpfs_tempdir : bool, default True
472
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
473
- tmpfs_size : int, optional, default None
474
- The value for the size (in MiB) of the tmpfs mount for this step.
475
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
476
- memory allocated for this step.
477
- tmpfs_path : str, optional, default None
478
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
479
- inferentia : int, default 0
480
- Number of Inferentia chips required for this step.
481
- trainium : int, default None
482
- Alias for inferentia. Use only one of the two.
483
- efa : int, default 0
484
- Number of elastic fabric adapter network devices to attach to container
485
- ephemeral_storage : int, default None
486
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
487
- This is only relevant for Fargate compute environments
488
- log_driver: str, optional, default None
489
- The log driver to use for the Amazon ECS container.
490
- log_options: List[str], optional, default None
491
- List of strings containing options for the chosen log driver. The configurable values
492
- depend on the `log driver` chosen. Validation of these options is not supported yet.
493
- Example: [`awslogs-group:aws/batch/job`]
368
+ times : int, default 3
369
+ Number of times to retry this task.
370
+ minutes_between_retries : int, default 2
371
+ Number of minutes between retries.
372
+ """
373
+ ...
374
+
375
+ @typing.overload
376
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
377
+ """
378
+ Specifies a timeout for your step.
379
+
380
+ This decorator is useful if this step may hang indefinitely.
381
+
382
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
383
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
384
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
385
+
386
+ Note that all the values specified in parameters are added together so if you specify
387
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
388
+
389
+
390
+ Parameters
391
+ ----------
392
+ seconds : int, default 0
393
+ Number of seconds to wait prior to timing out.
394
+ minutes : int, default 0
395
+ Number of minutes to wait prior to timing out.
396
+ hours : int, default 0
397
+ Number of hours to wait prior to timing out.
398
+ """
399
+ ...
400
+
401
+ @typing.overload
402
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
403
+ ...
404
+
405
+ @typing.overload
406
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
407
+ ...
408
+
409
+ def timeout(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
410
+ """
411
+ Specifies a timeout for your step.
412
+
413
+ This decorator is useful if this step may hang indefinitely.
414
+
415
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
416
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
417
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
418
+
419
+ Note that all the values specified in parameters are added together so if you specify
420
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
421
+
422
+
423
+ Parameters
424
+ ----------
425
+ seconds : int, default 0
426
+ Number of seconds to wait prior to timing out.
427
+ minutes : int, default 0
428
+ Number of minutes to wait prior to timing out.
429
+ hours : int, default 0
430
+ Number of hours to wait prior to timing out.
494
431
  """
495
432
  ...
496
433
 
@@ -584,168 +521,247 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: st
584
521
  ...
585
522
 
586
523
  @typing.overload
587
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
588
- """
589
- Decorator prototype for all step decorators. This function gets specialized
590
- and imported for all decorators types by _import_plugin_decorators().
591
- """
592
- ...
593
-
594
- @typing.overload
595
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
596
- ...
597
-
598
- def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
599
- """
600
- Decorator prototype for all step decorators. This function gets specialized
601
- and imported for all decorators types by _import_plugin_decorators().
602
- """
603
- ...
604
-
605
- @typing.overload
606
- def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
524
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
607
525
  """
608
- Specifies secrets to be retrieved and injected as environment variables prior to
609
- the execution of a step.
526
+ Specifies the PyPI packages for the step.
527
+
528
+ Information in this decorator will augment any
529
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
530
+ you can use `@pypi_base` to set packages required by all
531
+ steps and use `@pypi` to specify step-specific overrides.
610
532
 
611
533
 
612
534
  Parameters
613
535
  ----------
614
- sources : List[Union[str, Dict[str, Any]]], default: []
615
- List of secret specs, defining how the secrets are to be retrieved
616
- role : str, optional, default: None
617
- Role to use for fetching secrets
536
+ packages : Dict[str, str], default: {}
537
+ Packages to use for this step. The key is the name of the package
538
+ and the value is the version to use.
539
+ python : str, optional, default: None
540
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
541
+ that the version used will correspond to the version of the Python interpreter used to start the run.
618
542
  """
619
543
  ...
620
544
 
621
545
  @typing.overload
622
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
546
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
623
547
  ...
624
548
 
625
549
  @typing.overload
626
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
550
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
627
551
  ...
628
552
 
629
- def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
553
+ def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
630
554
  """
631
- Specifies secrets to be retrieved and injected as environment variables prior to
632
- the execution of a step.
555
+ Specifies the PyPI packages for the step.
556
+
557
+ Information in this decorator will augment any
558
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
559
+ you can use `@pypi_base` to set packages required by all
560
+ steps and use `@pypi` to specify step-specific overrides.
633
561
 
634
562
 
635
563
  Parameters
636
564
  ----------
637
- sources : List[Union[str, Dict[str, Any]]], default: []
638
- List of secret specs, defining how the secrets are to be retrieved
639
- role : str, optional, default: None
640
- Role to use for fetching secrets
565
+ packages : Dict[str, str], default: {}
566
+ Packages to use for this step. The key is the name of the package
567
+ and the value is the version to use.
568
+ python : str, optional, default: None
569
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
570
+ that the version used will correspond to the version of the Python interpreter used to start the run.
641
571
  """
642
572
  ...
643
573
 
644
574
  @typing.overload
645
- def card(*, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
575
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
646
576
  """
647
- Creates a human-readable report, a Metaflow Card, after this step completes.
648
-
649
- Note that you may add multiple `@card` decorators in a step with different parameters.
577
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
650
578
 
651
579
 
652
580
  Parameters
653
581
  ----------
654
- type : str, default 'default'
655
- Card type.
656
- id : str, optional, default None
657
- If multiple cards are present, use this id to identify this card.
658
- options : Dict[str, Any], default {}
659
- Options passed to the card. The contents depend on the card type.
660
- timeout : int, default 45
661
- Interrupt reporting if it takes more than this many seconds.
582
+ cpu : int, default 1
583
+ Number of CPUs required for this step. If `@resources` is
584
+ also present, the maximum value from all decorators is used.
585
+ gpu : int, default 0
586
+ Number of GPUs required for this step. If `@resources` is
587
+ also present, the maximum value from all decorators is used.
588
+ memory : int, default 4096
589
+ Memory size (in MB) required for this step. If
590
+ `@resources` is also present, the maximum value from all decorators is
591
+ used.
592
+ image : str, optional, default None
593
+ Docker image to use when launching on AWS Batch. If not specified, and
594
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
595
+ not, a default Docker image mapping to the current version of Python is used.
596
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
597
+ AWS Batch Job Queue to submit the job to.
598
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
599
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
600
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
601
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
602
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
603
+ shared_memory : int, optional, default None
604
+ The value for the size (in MiB) of the /dev/shm volume for this step.
605
+ This parameter maps to the `--shm-size` option in Docker.
606
+ max_swap : int, optional, default None
607
+ The total amount of swap memory (in MiB) a container can use for this
608
+ step. This parameter is translated to the `--memory-swap` option in
609
+ Docker where the value is the sum of the container memory plus the
610
+ `max_swap` value.
611
+ swappiness : int, optional, default None
612
+ This allows you to tune memory swappiness behavior for this step.
613
+ A swappiness value of 0 causes swapping not to happen unless absolutely
614
+ necessary. A swappiness value of 100 causes pages to be swapped very
615
+ aggressively. Accepted values are whole numbers between 0 and 100.
616
+ aws_batch_tags: Dict[str, str], optional, default None
617
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
618
+ Set as string key-value pairs.
619
+ use_tmpfs : bool, default False
620
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
621
+ not available on Fargate compute environments
622
+ tmpfs_tempdir : bool, default True
623
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
624
+ tmpfs_size : int, optional, default None
625
+ The value for the size (in MiB) of the tmpfs mount for this step.
626
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
627
+ memory allocated for this step.
628
+ tmpfs_path : str, optional, default None
629
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
630
+ inferentia : int, default 0
631
+ Number of Inferentia chips required for this step.
632
+ trainium : int, default None
633
+ Alias for inferentia. Use only one of the two.
634
+ efa : int, default 0
635
+ Number of elastic fabric adapter network devices to attach to container
636
+ ephemeral_storage : int, default None
637
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
638
+ This is only relevant for Fargate compute environments
639
+ log_driver: str, optional, default None
640
+ The log driver to use for the Amazon ECS container.
641
+ log_options: List[str], optional, default None
642
+ List of strings containing options for the chosen log driver. The configurable values
643
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
644
+ Example: [`awslogs-group:aws/batch/job`]
662
645
  """
663
646
  ...
664
647
 
665
648
  @typing.overload
666
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
649
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
667
650
  ...
668
651
 
669
652
  @typing.overload
670
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
653
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
671
654
  ...
672
655
 
673
- def card(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
656
+ def batch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None):
674
657
  """
675
- Creates a human-readable report, a Metaflow Card, after this step completes.
676
-
677
- Note that you may add multiple `@card` decorators in a step with different parameters.
658
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
678
659
 
679
660
 
680
661
  Parameters
681
662
  ----------
682
- type : str, default 'default'
683
- Card type.
684
- id : str, optional, default None
685
- If multiple cards are present, use this id to identify this card.
686
- options : Dict[str, Any], default {}
687
- Options passed to the card. The contents depend on the card type.
688
- timeout : int, default 45
689
- Interrupt reporting if it takes more than this many seconds.
663
+ cpu : int, default 1
664
+ Number of CPUs required for this step. If `@resources` is
665
+ also present, the maximum value from all decorators is used.
666
+ gpu : int, default 0
667
+ Number of GPUs required for this step. If `@resources` is
668
+ also present, the maximum value from all decorators is used.
669
+ memory : int, default 4096
670
+ Memory size (in MB) required for this step. If
671
+ `@resources` is also present, the maximum value from all decorators is
672
+ used.
673
+ image : str, optional, default None
674
+ Docker image to use when launching on AWS Batch. If not specified, and
675
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
676
+ not, a default Docker image mapping to the current version of Python is used.
677
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
678
+ AWS Batch Job Queue to submit the job to.
679
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
680
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
681
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
682
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
683
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
684
+ shared_memory : int, optional, default None
685
+ The value for the size (in MiB) of the /dev/shm volume for this step.
686
+ This parameter maps to the `--shm-size` option in Docker.
687
+ max_swap : int, optional, default None
688
+ The total amount of swap memory (in MiB) a container can use for this
689
+ step. This parameter is translated to the `--memory-swap` option in
690
+ Docker where the value is the sum of the container memory plus the
691
+ `max_swap` value.
692
+ swappiness : int, optional, default None
693
+ This allows you to tune memory swappiness behavior for this step.
694
+ A swappiness value of 0 causes swapping not to happen unless absolutely
695
+ necessary. A swappiness value of 100 causes pages to be swapped very
696
+ aggressively. Accepted values are whole numbers between 0 and 100.
697
+ aws_batch_tags: Dict[str, str], optional, default None
698
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
699
+ Set as string key-value pairs.
700
+ use_tmpfs : bool, default False
701
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
702
+ not available on Fargate compute environments
703
+ tmpfs_tempdir : bool, default True
704
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
705
+ tmpfs_size : int, optional, default None
706
+ The value for the size (in MiB) of the tmpfs mount for this step.
707
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
708
+ memory allocated for this step.
709
+ tmpfs_path : str, optional, default None
710
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
711
+ inferentia : int, default 0
712
+ Number of Inferentia chips required for this step.
713
+ trainium : int, default None
714
+ Alias for inferentia. Use only one of the two.
715
+ efa : int, default 0
716
+ Number of elastic fabric adapter network devices to attach to container
717
+ ephemeral_storage : int, default None
718
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
719
+ This is only relevant for Fargate compute environments
720
+ log_driver: str, optional, default None
721
+ The log driver to use for the Amazon ECS container.
722
+ log_options: List[str], optional, default None
723
+ List of strings containing options for the chosen log driver. The configurable values
724
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
725
+ Example: [`awslogs-group:aws/batch/job`]
690
726
  """
691
727
  ...
692
728
 
693
729
  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+ def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
 """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


 Parameters
 ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved.
+ role : str, optional, default: None
+ Role to use for fetching secrets.
 """
 ...

 @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
 ...

 @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
 ...

- def timeout(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
 """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


 Parameters
 ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved.
+ role : str, optional, default: None
+ Role to use for fetching secrets.
 """
 ...
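For orientation, a hedged sketch of how the `sources` list is typically used: each spec names a secret in the configured secrets backend (the secret name and the environment variable keys below are hypothetical), and its key-value pairs appear as environment variables only for the duration of the decorated step.

```python
# Hedged sketch: "my-db-credentials" is a hypothetical secret in the
# configured secrets backend; its keys surface as environment variables.
import os

from metaflow import FlowSpec, secrets, step


class SecretsDemoFlow(FlowSpec):

    @secrets(sources=["my-db-credentials"])
    @step
    def start(self):
        # DB_USER / DB_PASSWORD are assumed keys stored in the secret.
        user = os.environ["DB_USER"]
        password = os.environ["DB_PASSWORD"]
        print(f"connecting as {user}", "*" * len(password))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsDemoFlow()
```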
 
@@ -809,86 +825,212 @@ def conda(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_genera
 ...

 @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+ def card(*, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
 """
- Specifies environment variables to be set prior to the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


 Parameters
 ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
 """
 ...

 @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
 ...

 @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
 ...

- def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
+ def card(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
 """
- Specifies environment variables to be set prior to the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
+ ...
+
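A brief sketch of the usual pairing of `@card` with the `current.card` API (the report text and artifact below are arbitrary placeholders): the decorator renders whatever components the step appends.

```python
# Sketch: attach a default card to a step and append a Markdown component.
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown


class CardDemoFlow(FlowSpec):

    @card(type="default", timeout=45)
    @step
    def start(self):
        self.rows = 12345  # any artifact worth reporting
        current.card.append(Markdown(f"# Ingest report\nLoaded {self.rows} rows."))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardDemoFlow()
```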
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorator types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorator types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def trigger_on_finish(*, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
+
+ Parameters
+ ----------
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ """
+ ...
+
+ @typing.overload
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger_on_finish(f: typing.Type[~FlowSpecDerived] | None = None, *, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`


 Parameters
 ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
 """
 ...

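As a quick illustration of the `@trigger_on_finish` docstring above, a downstream flow might declare its upstream dependency like this (FooFlow is a placeholder name); once deployed to a production scheduler, it starts after FooFlow's runs in the same project-aware namespace complete successfully.

```python
# Sketch: run this flow automatically whenever FooFlow (placeholder name)
# finishes successfully in the same namespace.
from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # Details about the triggering run are available at execution time.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```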
  @typing.overload
- def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
 """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

 Parameters
 ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
 """
 ...

 @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
 ...

- def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
+ def pypi_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
 """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

 Parameters
 ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
 """
 ...
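A short sketch of the base-plus-override pattern the `@pypi_base` docstring describes; the package pins below are placeholders, not version recommendations.

```python
# Sketch: flow-wide packages via @pypi_base, with one step adding its own.
from metaflow import FlowSpec, pypi, pypi_base, step


@pypi_base(packages={"pandas": "2.2.2"}, python="3.11")  # placeholder pins
class PypiDemoFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        self.n = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.train)

    @pypi(packages={"scikit-learn": "1.5.0"})  # step-specific addition
    @step
    def train(self):
        import sklearn  # available only in this step's environment
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiDemoFlow()
```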
 
@@ -985,44 +1127,38 @@ def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typi
 """
 ...

- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def project(*, name: str, branch: str | None = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
 """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

 Parameters
 ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
- """
- Specifies the PyPI packages for all steps of the flow.
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.

- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
 """
 ...
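To ground the branch-resolution rules above: a flow tagged with `@project` deploys under `user.<username>` by default, under `test.<branch>` when `--branch` is given, and under `prod` or `prod.<branch>` when `--production` is given. The sketch below uses a placeholder project name.

```python
# Sketch: all flows sharing @project(name="demo_project") (placeholder name)
# get isolated, branch-aware deployments on the production scheduler.
from metaflow import FlowSpec, project, step


@project(name="demo_project")
class ProjectDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectDemoFlow()
```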
 
@@ -1069,142 +1205,6 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
 """
 ...

- def project(*, name: str, branch: str | None = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
- """
- ...
-
- @typing.overload
- def trigger_on_finish(*, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger_on_finish(f: typing.Type[~FlowSpecDerived] | None = None, *, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
 @typing.overload
 def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: str | None = None, timezone: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
 """