metaflow-stubs 2.18.13__py2.py3-none-any.whl → 2.19.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of metaflow-stubs might be problematic.

Files changed (168)
  1. metaflow-stubs/__init__.pyi +560 -559
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +3 -2
  7. metaflow-stubs/client/core.pyi +62 -31
  8. metaflow-stubs/client/filecache.pyi +20 -4
  9. metaflow-stubs/events.pyi +7 -8
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +9 -10
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +5 -5
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +16 -2
  20. metaflow-stubs/metaflow_current.pyi +15 -17
  21. metaflow-stubs/metaflow_git.pyi +3 -5
  22. metaflow-stubs/multicore_utils.pyi +4 -4
  23. metaflow-stubs/packaging_sys/__init__.pyi +41 -42
  24. metaflow-stubs/packaging_sys/backend.pyi +13 -13
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +5 -6
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +11 -11
  27. metaflow-stubs/packaging_sys/utils.pyi +3 -6
  28. metaflow-stubs/packaging_sys/v1.pyi +5 -5
  29. metaflow-stubs/parameters.pyi +7 -10
  30. metaflow-stubs/plugins/__init__.pyi +11 -11
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +6 -4
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +11 -9
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +5 -8
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +6 -4
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +10 -8
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +5 -6
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +4 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +15 -15
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +5 -5
  83. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  85. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  86. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  87. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  88. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3.pyi +27 -35
  93. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  94. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  95. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  96. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  97. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  102. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  105. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  108. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +4 -4
  111. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  116. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  117. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/parsers.pyi +2 -2
  119. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  121. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
  123. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  126. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  127. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  131. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/secrets_func.pyi +3 -5
  133. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  134. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  135. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  136. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  137. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  139. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  140. metaflow-stubs/pylint_wrapper.pyi +2 -2
  141. metaflow-stubs/runner/__init__.pyi +2 -2
  142. metaflow-stubs/runner/deployer.pyi +23 -22
  143. metaflow-stubs/runner/deployer_impl.pyi +6 -4
  144. metaflow-stubs/runner/metaflow_runner.pyi +134 -21
  145. metaflow-stubs/runner/nbdeploy.pyi +3 -3
  146. metaflow-stubs/runner/nbrun.pyi +3 -3
  147. metaflow-stubs/runner/subprocess_manager.pyi +8 -8
  148. metaflow-stubs/runner/utils.pyi +4 -5
  149. metaflow-stubs/system/__init__.pyi +2 -2
  150. metaflow-stubs/system/system_logger.pyi +4 -5
  151. metaflow-stubs/system/system_monitor.pyi +3 -3
  152. metaflow-stubs/tagging_util.pyi +2 -2
  153. metaflow-stubs/tuple_util.pyi +2 -2
  154. metaflow-stubs/user_configs/__init__.pyi +2 -2
  155. metaflow-stubs/user_configs/config_options.pyi +6 -7
  156. metaflow-stubs/user_configs/config_parameters.pyi +8 -8
  157. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  158. metaflow-stubs/user_decorators/common.pyi +6 -6
  159. metaflow-stubs/user_decorators/mutable_flow.pyi +8 -9
  160. metaflow-stubs/user_decorators/mutable_step.pyi +7 -10
  161. metaflow-stubs/user_decorators/user_flow_decorator.pyi +8 -9
  162. metaflow-stubs/user_decorators/user_step_decorator.pyi +24 -17
  163. metaflow-stubs/version.pyi +2 -2
  164. {metaflow_stubs-2.18.13.dist-info → metaflow_stubs-2.19.0.dist-info}/METADATA +2 -2
  165. metaflow_stubs-2.19.0.dist-info/RECORD +168 -0
  166. metaflow_stubs-2.18.13.dist-info/RECORD +0 -168
  167. {metaflow_stubs-2.18.13.dist-info → metaflow_stubs-2.19.0.dist-info}/WHEEL +0 -0
  168. {metaflow_stubs-2.18.13.dist-info → metaflow_stubs-2.19.0.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.18.13 #
- # Generated on 2025-10-20T17:35:52.643612 #
+ # MF version: 2.19.0 #
+ # Generated on 2025-10-26T02:29:29.828022 #
  ######################################################################################################

  from __future__ import annotations
@@ -40,17 +40,17 @@ from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
  from . import metaflow_git as metaflow_git
- from . import events as events
  from . import tuple_util as tuple_util
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.parsers import yaml_parser as yaml_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
@@ -59,6 +59,7 @@ from .client.core import default_namespace as default_namespace
  from .client.core import metadata as metadata
  from .client.core import get_metadata as get_metadata
  from .client.core import default_metadata as default_metadata
+ from .client.core import inspect_spin as inspect_spin
  from .client.core import Metaflow as Metaflow
  from .client.core import Flow as Flow
  from .client.core import Run as Run
@@ -71,8 +72,8 @@ from .runner.deployer import Deployer as Deployer
  from .runner.deployer import DeployedFlow as DeployedFlow
  from .runner.nbdeploy import NBDeployer as NBDeployer
  from . import version as version
- from . import system as system
  from . import cli_components as cli_components
+ from . import system as system
  from . import pylint_wrapper as pylint_wrapper
  from . import cli as cli

@@ -119,7 +120,7 @@ def step(f: typing.Callable[[FlowSpecDerived], None]) -> typing.Callable[[FlowSp
  def step(f: typing.Callable[[FlowSpecDerived, typing.Any], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callable[[FlowSpecDerived, typing.Any], None]]):
+ def step(f: typing.Callable[[~FlowSpecDerived], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any], NoneType]):
  """
  Marks a method in a FlowSpec as a Metaflow Step. Note that this
  decorator needs to be placed as close to the method as possible (ie:
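The `@step` docstring above notes that the decorator must sit directly above the step method. A minimal sketch of that usage, assuming a hypothetical two-step flow that is not part of this package diff:

```python
# Hypothetical flow illustrating @step placement; names are illustrative only.
from metaflow import FlowSpec, step


class HelloFlow(FlowSpec):

    @step  # innermost decorator, closest to the method
    def start(self):
        self.message = "hello"          # artifact carried to the next step
        self.next(self.end)

    @step
    def end(self):
        print(self.message)


if __name__ == "__main__":
    HelloFlow()
```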
@@ -165,7 +166,7 @@ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Ca
  def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
  """
  Decorator prototype for all step decorators. This function gets specialized
  and imported for all decorators types by _import_plugin_decorators().
@@ -173,7 +174,58 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
  Specifies the Conda environment for the step.
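The hunk above reorders the stubs so that `@catch` now precedes `@conda`; both docstrings describe per-step behavior. A hedged sketch of how the two decorators are typically combined on a step (package versions, step body, and the `compute_error` artifact name are illustrative assumptions):

```python
# Hypothetical flow combining @catch and @conda as documented above.
from metaflow import FlowSpec, step, catch, conda


class RobustFlow(FlowSpec):

    @catch(var="compute_error", print_exception=True)   # store any raised exception as an artifact
    @conda(packages={"pandas": "2.2.2"}, python="3.11")  # per-step Conda environment
    @step
    def start(self):
        import pandas as pd
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        # compute_error is only meaningful if start() raised; happy-path artifacts may then be absent
        if getattr(self, "compute_error", None):
            print("start failed:", self.compute_error)


if __name__ == "__main__":
    RobustFlow()
```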
@@ -206,7 +258,7 @@ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Calla
  def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def conda(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
  """
  Specifies the Conda environment for the step.

@@ -232,46 +284,36 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
- """
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
  """
  Specifies the PyPI packages for the step.
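The `@pypi` docstring above explains that step-level packages augment the flow-level `@pypi_base` settings. A hedged sketch of that layering (package choices and versions are illustrative assumptions):

```python
# Hypothetical flow: @pypi_base sets shared packages, @pypi adds a step-specific one.
from metaflow import FlowSpec, step, pypi, pypi_base


@pypi_base(packages={"requests": "2.32.3"}, python="3.11")  # required by all steps
class PypiFlow(FlowSpec):

    @pypi(packages={"pyyaml": "6.0.2"})  # step-specific override on top of @pypi_base
    @step
    def start(self):
        import requests
        import yaml
        print(requests.__version__, yaml.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiFlow()
```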
@@ -293,35 +335,85 @@ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] =
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(*, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None):
  """
- Specifies the PyPI packages for the step.
+ Specifies the resources needed when executing this step.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: str | None = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.List[str] | None = None, node_selector: typing.Dict[str, str] | str | None = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: int | None = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = '/metaflow_temp', persistent_volume_claims: typing.Dict[str, str] | None = None, shared_memory: int | None = None, port: int | None = None, compute_pool: str | None = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Dict[str, typing.Any] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
  Specifies that this step should execute on Kubernetes.
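The `@resources` docstring above stresses that the decorator is independent of the compute layer (`@batch`, `@kubernetes`). A hedged sketch of that pattern (CPU/memory/GPU values are illustrative assumptions):

```python
# Hypothetical flow: @resources states requirements without picking a compute layer.
from metaflow import FlowSpec, step, resources


class TrainFlow(FlowSpec):

    @resources(cpu=4, memory=16384, gpu=1)  # requirements only; no compute layer chosen here
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TrainFlow()
```

Per the docstring, the same flow can then be launched with `python trainflow.py run --with kubernetes` (or `--with batch`) to apply these requirements on the chosen compute layer.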
@@ -411,7 +503,7 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
  Specifies environment variables to be set prior to the execution of a step.

@@ -431,7 +523,7 @@ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing
  def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
  """
  Specifies environment variables to be set prior to the execution of a step.
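The `@environment` stub above takes a `vars` mapping that is applied before the step runs. A hedged sketch of that usage (the variable name and value are illustrative assumptions):

```python
# Hypothetical flow: @environment injects variables before the step body executes.
import os

from metaflow import FlowSpec, step, environment


class EnvFlow(FlowSpec):

    @environment(vars={"MODEL_STAGE": "staging"})  # set prior to step execution
    @step
    def start(self):
        print("stage:", os.environ["MODEL_STAGE"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvFlow()
```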
@@ -444,302 +536,68 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies a timeout for your step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def timeout(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies a timeout for your step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
- """
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
- """
- ...
-
- @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).


  Parameters
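The `@timeout` docstring above explains that a timeout surfaces as an exception, is retried by `@retry`, and finally caught by `@catch`. A hedged sketch stacking the three decorators as described (durations, artifact name, and step body are illustrative assumptions):

```python
# Hypothetical flow combining @timeout, @retry, and @catch per the documented interplay.
import time

from metaflow import FlowSpec, step, timeout, retry, catch


class GuardedFlow(FlowSpec):

    @catch(var="timeout_error")             # after retries are exhausted, keep the flow going
    @retry(times=2, minutes_between_retries=0)
    @timeout(minutes=1, seconds=30)         # values add up: effective limit is 90 seconds
    @step
    def start(self):
        time.sleep(5)                       # stand-in for work that might hang
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "timeout_error", None):
            print("start timed out on every attempt")


if __name__ == "__main__":
    GuardedFlow()
```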
@@ -818,7 +676,7 @@ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Calla
  def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def batch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None):
  """
  Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

@@ -892,247 +750,145 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
892
750
  ...
893
751
 
894
752
  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, times: int = 3, minutes_between_retries: int = 2):
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

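A minimal sketch of the `@retry` decorator documented above, combined with `@catch` as the docstring suggests. The flow, step, and variable names are hypothetical.

```
from metaflow import FlowSpec, step, retry, catch

class RetryDemoFlow(FlowSpec):
    # Hypothetical flaky step: retry up to 3 times, 2 minutes apart, and let
    # @catch record the exception if all retries are exhausted so the flow continues.
    @catch(var='fetch_error')
    @retry(times=3, minutes_between_retries=2)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    RetryDemoFlow()
```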
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def card(*, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
  options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def card(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
  options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

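A minimal sketch of the `@card` decorator documented above. The flow and the card contents are hypothetical, and the `current.card` / `metaflow.cards.Markdown` usage is assumed from Metaflow's card API rather than shown in this diff.

```
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown

class CardDemoFlow(FlowSpec):
    # Hypothetical report: a 'default' card identified by id='summary',
    # populated at task runtime through current.card.
    @card(type='default', id='summary', timeout=45)
    @step
    def start(self):
        current.card['summary'].append(Markdown('# Hello from this step'))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    CardDemoFlow()
```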
  @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...
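A minimal sketch of the `@secrets` decorator documented above. The secret source name and the `DB_PASSWORD` environment variable are placeholders, not values from this package.

```
import os

from metaflow import FlowSpec, step, secrets

class SecretsDemoFlow(FlowSpec):
    # Hypothetical secret spec: 'db-credentials' stands in for a real source
    # configured in your secrets backend.
    @secrets(sources=['db-credentials'])
    @step
    def start(self):
        password = os.environ.get('DB_PASSWORD')  # injected before the step runs
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    SecretsDemoFlow()
```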
 
@@ -1180,47 +936,57 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  ...

  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: str | None = None, timezone: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...

  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def schedule(f: typing.Type[~FlowSpecDerived] | None = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: str | None = None, timezone: str | None = None):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...

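A minimal sketch of the flow-level `@schedule` decorator documented above; the flow name is hypothetical and only parameters from the stub signature are used.

```
from metaflow import FlowSpec, schedule, step

# Hypothetical nightly flow: run once a day when deployed to a production
# scheduler; cron= and timezone= (see the signature above) allow finer control.
@schedule(daily=True)
class NightlyDemoFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    NightlyDemoFlow()
```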
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: str | typing.List[str], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
  before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
@@ -1264,57 +1030,107 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  ...

  @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the flow(s) that this flow depends on.

+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully

- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
+
+ Parameters
+ ----------
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...

  @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def trigger_on_finish(f: typing.Type[~FlowSpecDerived] | None = None, *, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`


  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...

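A minimal sketch of `@trigger_on_finish` as documented above, reusing the hypothetical `FooFlow` name from the docstring examples.

```
from metaflow import FlowSpec, step, trigger_on_finish

# Hypothetical dependency: run this flow whenever an upstream flow named
# 'FooFlow' (as in the docstring examples) completes successfully.
@trigger_on_finish(flow='FooFlow')
class DownstreamDemoFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    DownstreamDemoFlow()
```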
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def project(*, name: str, branch: str | None = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  Specifies what flows belong to the same project.

@@ -1349,3 +1165,188 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
  """
  ...

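A minimal sketch of the `@project` decorator whose signature changed above; the project name mirrors the docstring examples and `branch`/`production` keep their defaults.

```
from metaflow import FlowSpec, project, step

# Hypothetical project grouping: 'my_project' is a placeholder name.
@project(name='my_project')
class ProjectDemoFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    ProjectDemoFlow()
```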
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
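A minimal sketch of the flow-level `@conda_base` decorator added above; the Python and package versions are placeholders.

```
from metaflow import FlowSpec, conda_base, step

# Hypothetical environment pin: versions here are illustrative only.
@conda_base(python='3.10.4', packages={'pandas': '2.1.4'})
class CondaDemoFlow(FlowSpec):
    @step
    def start(self):
        import pandas  # resolved from the Conda environment declared above
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    CondaDemoFlow()
```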
+ @typing.overload
+ def trigger(*, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
+
+
+ Parameters
+ ----------
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ """
+ ...
+
+ @typing.overload
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
+
+
+ Parameters
+ ----------
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ """
+ ...
+
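A minimal sketch of the `@trigger` decorator added above, using the event name and parameter mapping from the docstring examples; the flow itself is hypothetical.

```
from metaflow import FlowSpec, Parameter, step, trigger

# Hypothetical event wiring: 'foo' and 'event_field' follow the docstring examples.
@trigger(event={'name': 'foo', 'parameters': {'flow_param': 'event_field'}})
class EventDemoFlow(FlowSpec):
    flow_param = Parameter('flow_param', default='none')

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    EventDemoFlow()
```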
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
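A minimal sketch of the flow-level `@pypi_base` decorator added above; the pinned versions are placeholders.

```
from metaflow import FlowSpec, pypi_base, step

# Hypothetical pins: package versions are illustrative, not from this diff.
@pypi_base(python='3.11.5', packages={'requests': '2.31.0'})
class PypiDemoFlow(FlowSpec):
    @step
    def start(self):
        import requests  # installed from the PyPI spec declared above
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    PypiDemoFlow()
```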