metaflow-stubs 2.19.3__py2.py3-none-any.whl → 2.19.4__py2.py3-none-any.whl

This diff compares the contents of two package versions as published to their public registry. It is provided for informational purposes only and reflects the packages exactly as they were released.
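If you want to reproduce a comparison like this yourself, the two wheels can be downloaded from the registry and diffed locally. A rough sketch (the file names below are the two artifacts compared on this page; adjust paths as needed):

```python
# Rough local reproduction of a wheel-to-wheel text diff.
# Assumes both wheel files have already been downloaded from the registry.
import difflib
import zipfile

OLD = "metaflow_stubs-2.19.3-py2.py3-none-any.whl"
NEW = "metaflow_stubs-2.19.4-py2.py3-none-any.whl"

def wheel_texts(path):
    """Map each archive member to its decoded lines (a wheel is a zip file)."""
    with zipfile.ZipFile(path) as zf:
        return {
            name: zf.read(name).decode("utf-8", errors="replace").splitlines()
            for name in zf.namelist()
        }

old_files, new_files = wheel_texts(OLD), wheel_texts(NEW)
for name in sorted(set(old_files) | set(new_files)):
    diff = list(difflib.unified_diff(
        old_files.get(name, []), new_files.get(name, []),
        fromfile=f"2.19.3/{name}", tofile=f"2.19.4/{name}", lineterm="",
    ))
    if diff:
        print("\n".join(diff))
```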

Potentially problematic release: this version of metaflow-stubs has been flagged as possibly problematic.

Files changed (168)
  1. metaflow-stubs/__init__.pyi +583 -583
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +4 -4
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +27 -27
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  24. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +4 -4
  29. metaflow-stubs/parameters.pyi +4 -4
  30. metaflow-stubs/plugins/__init__.pyi +14 -14
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -5
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +4 -4
  83. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  85. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  86. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  87. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  88. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  93. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  94. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  95. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  96. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  97. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  102. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  105. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  108. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  111. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  116. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  117. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/parsers.pyi +2 -2
  119. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  121. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  123. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  126. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  127. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  130. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  131. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  133. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  134. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  135. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  136. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  137. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  139. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  140. metaflow-stubs/pylint_wrapper.pyi +2 -2
  141. metaflow-stubs/runner/__init__.pyi +2 -2
  142. metaflow-stubs/runner/deployer.pyi +33 -33
  143. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  144. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  145. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  146. metaflow-stubs/runner/nbrun.pyi +2 -2
  147. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  148. metaflow-stubs/runner/utils.pyi +4 -4
  149. metaflow-stubs/system/__init__.pyi +2 -2
  150. metaflow-stubs/system/system_logger.pyi +2 -2
  151. metaflow-stubs/system/system_monitor.pyi +2 -2
  152. metaflow-stubs/tagging_util.pyi +2 -2
  153. metaflow-stubs/tuple_util.pyi +2 -2
  154. metaflow-stubs/user_configs/__init__.pyi +2 -2
  155. metaflow-stubs/user_configs/config_options.pyi +3 -3
  156. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  157. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  158. metaflow-stubs/user_decorators/common.pyi +2 -2
  159. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  160. metaflow-stubs/user_decorators/mutable_step.pyi +3 -3
  161. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  162. metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
  163. metaflow-stubs/version.pyi +2 -2
  164. {metaflow_stubs-2.19.3.dist-info → metaflow_stubs-2.19.4.dist-info}/METADATA +2 -2
  165. metaflow_stubs-2.19.4.dist-info/RECORD +168 -0
  166. metaflow_stubs-2.19.3.dist-info/RECORD +0 -168
  167. {metaflow_stubs-2.19.3.dist-info → metaflow_stubs-2.19.4.dist-info}/WHEEL +0 -0
  168. {metaflow_stubs-2.19.3.dist-info → metaflow_stubs-2.19.4.dist-info}/top_level.txt +0 -0
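Aside from the version and generation-timestamp bump at the top of metaflow-stubs/__init__.pyi and a small import reordering, the hunks below largely reorder the auto-generated step-decorator stubs (@secrets, @card, @pypi, @retry, @timeout, @environment, @batch, @resources, @catch, ...). Since the package ships only type stubs (.pyi files), a quick smoke test is to type-check a small flow against them. A minimal sketch, with a hypothetical flow and file name, assuming metaflow==2.19.4 and these stubs are installed in the same environment and checked with mypy or pyright:

```python
# stub_check_flow.py -- hypothetical sanity-check flow; the decorator parameters
# mirror the defaults documented in the stubs below.
from metaflow import FlowSpec, card, retry, step, timeout


class StubCheckFlow(FlowSpec):

    @card(type="default", timeout=45)           # @card stub: type / id / options / timeout
    @retry(times=3, minutes_between_retries=2)  # @retry stub defaults
    @timeout(minutes=10)                        # seconds / minutes / hours are summed
    @step
    def start(self):
        self.message = "stubs resolve"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)


if __name__ == "__main__":
    StubCheckFlow()
```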
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.19.3 #
- # Generated on 2025-10-28T12:26:25.237849 #
+ # MF version: 2.19.4 #
+ # Generated on 2025-10-29T20:17:51.770903 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -47,10 +47,10 @@ from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
- from .plugins.parsers import yaml_parser as yaml_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.parsers import yaml_parser as yaml_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
@@ -154,6 +154,145 @@ def step(f: typing.Callable[[~FlowSpecDerived], NoneType] | typing.Callable[[~Fl
  """
  ...

+ @typing.overload
+ def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
+ """
+ ...
+
+ @typing.overload
+ def card(*, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
+ ...
+
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
  @typing.overload
  def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
@@ -214,108 +353,174 @@ def conda(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_genera
214
353
  ...
215
354
 
216
355
  @typing.overload
217
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
356
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
218
357
  """
219
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
358
+ Specifies the number of times the task corresponding
359
+ to a step needs to be retried.
360
+
361
+ This decorator is useful for handling transient errors, such as networking issues.
362
+ If your task contains operations that can't be retried safely, e.g. database updates,
363
+ it is advisable to annotate it with `@retry(times=0)`.
364
+
365
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
366
+ decorator will execute a no-op task after all retries have been exhausted,
367
+ ensuring that the flow execution can continue.
220
368
 
221
369
 
222
370
  Parameters
223
371
  ----------
224
- cpu : int, default 1
225
- Number of CPUs required for this step. If `@resources` is
226
- also present, the maximum value from all decorators is used.
227
- gpu : int, default 0
228
- Number of GPUs required for this step. If `@resources` is
229
- also present, the maximum value from all decorators is used.
230
- memory : int, default 4096
231
- Memory size (in MB) required for this step. If
232
- `@resources` is also present, the maximum value from all decorators is
233
- used.
234
- image : str, optional, default None
235
- Docker image to use when launching on AWS Batch. If not specified, and
236
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
237
- not, a default Docker image mapping to the current version of Python is used.
238
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
239
- AWS Batch Job Queue to submit the job to.
240
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
241
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
242
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
243
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
244
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
245
- shared_memory : int, optional, default None
246
- The value for the size (in MiB) of the /dev/shm volume for this step.
247
- This parameter maps to the `--shm-size` option in Docker.
248
- max_swap : int, optional, default None
249
- The total amount of swap memory (in MiB) a container can use for this
250
- step. This parameter is translated to the `--memory-swap` option in
251
- Docker where the value is the sum of the container memory plus the
252
- `max_swap` value.
253
- swappiness : int, optional, default None
254
- This allows you to tune memory swappiness behavior for this step.
255
- A swappiness value of 0 causes swapping not to happen unless absolutely
256
- necessary. A swappiness value of 100 causes pages to be swapped very
257
- aggressively. Accepted values are whole numbers between 0 and 100.
258
- aws_batch_tags: Dict[str, str], optional, default None
259
- Sets arbitrary AWS tags on the AWS Batch compute environment.
260
- Set as string key-value pairs.
261
- use_tmpfs : bool, default False
262
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
263
- not available on Fargate compute environments
264
- tmpfs_tempdir : bool, default True
265
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
266
- tmpfs_size : int, optional, default None
267
- The value for the size (in MiB) of the tmpfs mount for this step.
268
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
269
- memory allocated for this step.
270
- tmpfs_path : str, optional, default None
271
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
272
- inferentia : int, default 0
273
- Number of Inferentia chips required for this step.
274
- trainium : int, default None
275
- Alias for inferentia. Use only one of the two.
276
- efa : int, default 0
277
- Number of elastic fabric adapter network devices to attach to container
278
- ephemeral_storage : int, default None
279
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
280
- This is only relevant for Fargate compute environments
281
- log_driver: str, optional, default None
282
- The log driver to use for the Amazon ECS container.
283
- log_options: List[str], optional, default None
284
- List of strings containing options for the chosen log driver. The configurable values
285
- depend on the `log driver` chosen. Validation of these options is not supported yet.
286
- Example: [`awslogs-group:aws/batch/job`]
372
+ times : int, default 3
373
+ Number of times to retry this task.
374
+ minutes_between_retries : int, default 2
375
+ Number of minutes between retries.
287
376
  """
288
377
  ...
289
378
 
290
379
  @typing.overload
291
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
380
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
292
381
  ...
293
382
 
294
383
  @typing.overload
295
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
384
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
296
385
  ...
297
386
 
298
- def batch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None):
387
+ def retry(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, times: int = 3, minutes_between_retries: int = 2):
299
388
  """
300
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
389
+ Specifies the number of times the task corresponding
390
+ to a step needs to be retried.
391
+
392
+ This decorator is useful for handling transient errors, such as networking issues.
393
+ If your task contains operations that can't be retried safely, e.g. database updates,
394
+ it is advisable to annotate it with `@retry(times=0)`.
395
+
396
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
397
+ decorator will execute a no-op task after all retries have been exhausted,
398
+ ensuring that the flow execution can continue.
301
399
 
302
400
 
303
401
  Parameters
304
402
  ----------
305
- cpu : int, default 1
306
- Number of CPUs required for this step. If `@resources` is
307
- also present, the maximum value from all decorators is used.
308
- gpu : int, default 0
309
- Number of GPUs required for this step. If `@resources` is
310
- also present, the maximum value from all decorators is used.
311
- memory : int, default 4096
312
- Memory size (in MB) required for this step. If
313
- `@resources` is also present, the maximum value from all decorators is
314
- used.
315
- image : str, optional, default None
316
- Docker image to use when launching on AWS Batch. If not specified, and
317
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
318
- not, a default Docker image mapping to the current version of Python is used.
403
+ times : int, default 3
404
+ Number of times to retry this task.
405
+ minutes_between_retries : int, default 2
406
+ Number of minutes between retries.
407
+ """
408
+ ...
409
+
410
+ @typing.overload
411
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
412
+ """
413
+ Specifies a timeout for your step.
414
+
415
+ This decorator is useful if this step may hang indefinitely.
416
+
417
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
418
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
419
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
420
+
421
+ Note that all the values specified in parameters are added together so if you specify
422
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
423
+
424
+
425
+ Parameters
426
+ ----------
427
+ seconds : int, default 0
428
+ Number of seconds to wait prior to timing out.
429
+ minutes : int, default 0
430
+ Number of minutes to wait prior to timing out.
431
+ hours : int, default 0
432
+ Number of hours to wait prior to timing out.
433
+ """
434
+ ...
435
+
436
+ @typing.overload
437
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
438
+ ...
439
+
440
+ @typing.overload
441
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
442
+ ...
443
+
444
+ def timeout(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
445
+ """
446
+ Specifies a timeout for your step.
447
+
448
+ This decorator is useful if this step may hang indefinitely.
449
+
450
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
451
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
452
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
453
+
454
+ Note that all the values specified in parameters are added together so if you specify
455
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
456
+
457
+
458
+ Parameters
459
+ ----------
460
+ seconds : int, default 0
461
+ Number of seconds to wait prior to timing out.
462
+ minutes : int, default 0
463
+ Number of minutes to wait prior to timing out.
464
+ hours : int, default 0
465
+ Number of hours to wait prior to timing out.
466
+ """
467
+ ...
468
+
469
+ @typing.overload
470
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
471
+ """
472
+ Specifies environment variables to be set prior to the execution of a step.
473
+
474
+
475
+ Parameters
476
+ ----------
477
+ vars : Dict[str, str], default {}
478
+ Dictionary of environment variables to set.
479
+ """
480
+ ...
481
+
482
+ @typing.overload
483
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
484
+ ...
485
+
486
+ @typing.overload
487
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
488
+ ...
489
+
490
+ def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
491
+ """
492
+ Specifies environment variables to be set prior to the execution of a step.
493
+
494
+
495
+ Parameters
496
+ ----------
497
+ vars : Dict[str, str], default {}
498
+ Dictionary of environment variables to set.
499
+ """
500
+ ...
501
+
502
+ @typing.overload
503
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
504
+ """
505
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
506
+
507
+
508
+ Parameters
509
+ ----------
510
+ cpu : int, default 1
511
+ Number of CPUs required for this step. If `@resources` is
512
+ also present, the maximum value from all decorators is used.
513
+ gpu : int, default 0
514
+ Number of GPUs required for this step. If `@resources` is
515
+ also present, the maximum value from all decorators is used.
516
+ memory : int, default 4096
517
+ Memory size (in MB) required for this step. If
518
+ `@resources` is also present, the maximum value from all decorators is
519
+ used.
520
+ image : str, optional, default None
521
+ Docker image to use when launching on AWS Batch. If not specified, and
522
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
523
+ not, a default Docker image mapping to the current version of Python is used.
319
524
  queue : str, default METAFLOW_BATCH_JOB_QUEUE
320
525
  AWS Batch Job Queue to submit the job to.
321
526
  iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
@@ -369,111 +574,83 @@ def batch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_genera
369
574
  ...
370
575
 
371
576
  @typing.overload
372
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
373
- """
374
- Decorator prototype for all step decorators. This function gets specialized
375
- and imported for all decorators types by _import_plugin_decorators().
376
- """
377
- ...
378
-
379
- @typing.overload
380
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
381
- ...
382
-
383
- def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
384
- """
385
- Decorator prototype for all step decorators. This function gets specialized
386
- and imported for all decorators types by _import_plugin_decorators().
387
- """
388
- ...
389
-
390
- @typing.overload
391
- def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
392
- """
393
- Specifies secrets to be retrieved and injected as environment variables prior to
394
- the execution of a step.
395
-
396
-
397
- Parameters
398
- ----------
399
- sources : List[Union[str, Dict[str, Any]]], default: []
400
- List of secret specs, defining how the secrets are to be retrieved
401
- role : str, optional, default: None
402
- Role to use for fetching secrets
403
- """
404
- ...
405
-
406
- @typing.overload
407
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
408
- ...
409
-
410
- @typing.overload
411
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
412
- ...
413
-
414
- def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
415
- """
416
- Specifies secrets to be retrieved and injected as environment variables prior to
417
- the execution of a step.
418
-
419
-
420
- Parameters
421
- ----------
422
- sources : List[Union[str, Dict[str, Any]]], default: []
423
- List of secret specs, defining how the secrets are to be retrieved
424
- role : str, optional, default: None
425
- Role to use for fetching secrets
426
- """
427
- ...
428
-
429
- @typing.overload
430
- def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
431
- """
432
- Specifies that the step will success under all circumstances.
433
-
434
- The decorator will create an optional artifact, specified by `var`, which
435
- contains the exception raised. You can use it to detect the presence
436
- of errors, indicating that all happy-path artifacts produced by the step
437
- are missing.
438
-
439
-
440
- Parameters
441
- ----------
442
- var : str, optional, default None
443
- Name of the artifact in which to store the caught exception.
444
- If not specified, the exception is not stored.
445
- print_exception : bool, default True
446
- Determines whether or not the exception is printed to
447
- stdout when caught.
448
- """
449
- ...
450
-
451
- @typing.overload
452
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
577
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
453
578
  ...
454
579
 
455
580
  @typing.overload
456
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
581
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
457
582
  ...
458
583
 
459
- def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
584
+ def batch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None):
460
585
  """
461
- Specifies that the step will success under all circumstances.
462
-
463
- The decorator will create an optional artifact, specified by `var`, which
464
- contains the exception raised. You can use it to detect the presence
465
- of errors, indicating that all happy-path artifacts produced by the step
466
- are missing.
586
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
467
587
 
468
588
 
469
589
  Parameters
470
590
  ----------
471
- var : str, optional, default None
472
- Name of the artifact in which to store the caught exception.
473
- If not specified, the exception is not stored.
474
- print_exception : bool, default True
475
- Determines whether or not the exception is printed to
476
- stdout when caught.
591
+ cpu : int, default 1
592
+ Number of CPUs required for this step. If `@resources` is
593
+ also present, the maximum value from all decorators is used.
594
+ gpu : int, default 0
595
+ Number of GPUs required for this step. If `@resources` is
596
+ also present, the maximum value from all decorators is used.
597
+ memory : int, default 4096
598
+ Memory size (in MB) required for this step. If
599
+ `@resources` is also present, the maximum value from all decorators is
600
+ used.
601
+ image : str, optional, default None
602
+ Docker image to use when launching on AWS Batch. If not specified, and
603
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
604
+ not, a default Docker image mapping to the current version of Python is used.
605
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
606
+ AWS Batch Job Queue to submit the job to.
607
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
608
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
609
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
610
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
611
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
612
+ shared_memory : int, optional, default None
613
+ The value for the size (in MiB) of the /dev/shm volume for this step.
614
+ This parameter maps to the `--shm-size` option in Docker.
615
+ max_swap : int, optional, default None
616
+ The total amount of swap memory (in MiB) a container can use for this
617
+ step. This parameter is translated to the `--memory-swap` option in
618
+ Docker where the value is the sum of the container memory plus the
619
+ `max_swap` value.
620
+ swappiness : int, optional, default None
621
+ This allows you to tune memory swappiness behavior for this step.
622
+ A swappiness value of 0 causes swapping not to happen unless absolutely
623
+ necessary. A swappiness value of 100 causes pages to be swapped very
624
+ aggressively. Accepted values are whole numbers between 0 and 100.
625
+ aws_batch_tags: Dict[str, str], optional, default None
626
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
627
+ Set as string key-value pairs.
628
+ use_tmpfs : bool, default False
629
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
630
+ not available on Fargate compute environments
631
+ tmpfs_tempdir : bool, default True
632
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
633
+ tmpfs_size : int, optional, default None
634
+ The value for the size (in MiB) of the tmpfs mount for this step.
635
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
636
+ memory allocated for this step.
637
+ tmpfs_path : str, optional, default None
638
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
639
+ inferentia : int, default 0
640
+ Number of Inferentia chips required for this step.
641
+ trainium : int, default None
642
+ Alias for inferentia. Use only one of the two.
643
+ efa : int, default 0
644
+ Number of elastic fabric adapter network devices to attach to container
645
+ ephemeral_storage : int, default None
646
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
647
+ This is only relevant for Fargate compute environments
648
+ log_driver: str, optional, default None
649
+ The log driver to use for the Amazon ECS container.
650
+ log_options: List[str], optional, default None
651
+ List of strings containing options for the chosen log driver. The configurable values
652
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
653
+ Example: [`awslogs-group:aws/batch/job`]
477
654
  """
478
655
  ...
479
656
 
@@ -603,292 +780,251 @@ def resources(*, cpu: int = 1, gpu: int | None = None, disk: int | None = None,
603
780
  ...
604
781
 
605
782
  @typing.overload
606
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
607
- ...
608
-
609
- @typing.overload
610
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
611
- ...
612
-
613
- def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None):
614
- """
615
- Specifies the resources needed when executing this step.
616
-
617
- Use `@resources` to specify the resource requirements
618
- independently of the specific compute layer (`@batch`, `@kubernetes`).
619
-
620
- You can choose the compute layer on the command line by executing e.g.
621
- ```
622
- python myflow.py run --with batch
623
- ```
624
- or
625
- ```
626
- python myflow.py run --with kubernetes
627
- ```
628
- which executes the flow on the desired system using the
629
- requirements specified in `@resources`.
630
-
631
-
632
- Parameters
633
- ----------
634
- cpu : int, default 1
635
- Number of CPUs required for this step.
636
- gpu : int, optional, default None
637
- Number of GPUs required for this step.
638
- disk : int, optional, default None
639
- Disk size (in MB) required for this step. Only applies on Kubernetes.
640
- memory : int, default 4096
641
- Memory size (in MB) required for this step.
642
- shared_memory : int, optional, default None
643
- The value for the size (in MiB) of the /dev/shm volume for this step.
644
- This parameter maps to the `--shm-size` option in Docker.
645
- """
646
- ...
647
-
648
- @typing.overload
649
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
650
- """
651
- Specifies the number of times the task corresponding
652
- to a step needs to be retried.
653
-
654
- This decorator is useful for handling transient errors, such as networking issues.
655
- If your task contains operations that can't be retried safely, e.g. database updates,
656
- it is advisable to annotate it with `@retry(times=0)`.
657
-
658
- This can be used in conjunction with the `@catch` decorator. The `@catch`
659
- decorator will execute a no-op task after all retries have been exhausted,
660
- ensuring that the flow execution can continue.
661
-
662
-
663
- Parameters
664
- ----------
665
- times : int, default 3
666
- Number of times to retry this task.
667
- minutes_between_retries : int, default 2
668
- Number of minutes between retries.
669
- """
670
- ...
671
-
672
- @typing.overload
673
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
674
- ...
675
-
676
- @typing.overload
677
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
678
- ...
679
-
680
- def retry(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, times: int = 3, minutes_between_retries: int = 2):
681
- """
682
- Specifies the number of times the task corresponding
683
- to a step needs to be retried.
684
-
685
- This decorator is useful for handling transient errors, such as networking issues.
686
- If your task contains operations that can't be retried safely, e.g. database updates,
687
- it is advisable to annotate it with `@retry(times=0)`.
688
-
689
- This can be used in conjunction with the `@catch` decorator. The `@catch`
690
- decorator will execute a no-op task after all retries have been exhausted,
691
- ensuring that the flow execution can continue.
692
-
693
-
694
- Parameters
695
- ----------
696
- times : int, default 3
697
- Number of times to retry this task.
698
- minutes_between_retries : int, default 2
699
- Number of minutes between retries.
700
- """
701
- ...
702
-
703
- @typing.overload
704
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
705
- """
706
- Specifies environment variables to be set prior to the execution of a step.
707
-
708
-
709
- Parameters
710
- ----------
711
- vars : Dict[str, str], default {}
712
- Dictionary of environment variables to set.
713
- """
714
- ...
715
-
716
- @typing.overload
717
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
783
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
718
784
  ...
719
785
 
720
786
  @typing.overload
721
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
787
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
722
788
  ...
723
789
 
724
- def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
790
+ def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None):
725
791
  """
726
- Specifies environment variables to be set prior to the execution of a step.
727
-
792
+ Specifies the resources needed when executing this step.
728
793
 
729
- Parameters
730
- ----------
731
- vars : Dict[str, str], default {}
732
- Dictionary of environment variables to set.
733
- """
734
- ...
735
-
736
- @typing.overload
737
- def card(*, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
738
- """
739
- Creates a human-readable report, a Metaflow Card, after this step completes.
794
+ Use `@resources` to specify the resource requirements
795
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
740
796
 
741
- Note that you may add multiple `@card` decorators in a step with different parameters.
797
+ You can choose the compute layer on the command line by executing e.g.
798
+ ```
799
+ python myflow.py run --with batch
800
+ ```
801
+ or
802
+ ```
803
+ python myflow.py run --with kubernetes
804
+ ```
805
+ which executes the flow on the desired system using the
806
+ requirements specified in `@resources`.
742
807
 
743
808
 
744
809
  Parameters
745
810
  ----------
746
- type : str, default 'default'
747
- Card type.
748
- id : str, optional, default None
749
- If multiple cards are present, use this id to identify this card.
750
- options : Dict[str, Any], default {}
751
- Options passed to the card. The contents depend on the card type.
752
- timeout : int, default 45
753
- Interrupt reporting if it takes more than this many seconds.
811
+ cpu : int, default 1
812
+ Number of CPUs required for this step.
813
+ gpu : int, optional, default None
814
+ Number of GPUs required for this step.
815
+ disk : int, optional, default None
816
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
817
+ memory : int, default 4096
818
+ Memory size (in MB) required for this step.
819
+ shared_memory : int, optional, default None
820
+ The value for the size (in MiB) of the /dev/shm volume for this step.
821
+ This parameter maps to the `--shm-size` option in Docker.
754
822
  """
755
823
  ...
756
824
 
757
825
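For concreteness, here is a minimal sketch of the usage the `@resources` docstring above describes. The flow name, step bodies, and the specific cpu/memory values are illustrative only; the decorator and its parameters come from the signature shown in this diff.

```
from metaflow import FlowSpec, step, resources


class ResourcesSketchFlow(FlowSpec):

    @resources(cpu=2, memory=8192)  # declare requirements; no compute layer is pinned here
    @step
    def start(self):
        # The same step can run locally, on AWS Batch, or on Kubernetes.
        self.total = sum(range(10))
        self.next(self.end)

    @step
    def end(self):
        print("total:", self.total)


if __name__ == "__main__":
    ResourcesSketchFlow()
```

Running it with `python resourcessketchflow.py run --with kubernetes` (or `--with batch`) would then apply these requirements on the chosen compute layer, as described above.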
  @typing.overload
758
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
826
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
827
+ """
828
+ Decorator prototype for all step decorators. This function gets specialized
829
+ and imported for all decorator types by _import_plugin_decorators().
830
+ """
759
831
  ...
760
832
 
761
833
  @typing.overload
762
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
834
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
763
835
  ...
764
836
 
765
- def card(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
837
+ def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
766
838
  """
767
- Creates a human-readable report, a Metaflow Card, after this step completes.
768
-
769
- Note that you may add multiple `@card` decorators in a step with different parameters.
770
-
771
-
772
- Parameters
773
- ----------
774
- type : str, default 'default'
775
- Card type.
776
- id : str, optional, default None
777
- If multiple cards are present, use this id to identify this card.
778
- options : Dict[str, Any], default {}
779
- Options passed to the card. The contents depend on the card type.
780
- timeout : int, default 45
781
- Interrupt reporting if it takes more than this many seconds.
839
+ Decorator prototype for all step decorators. This function gets specialized
840
+ and imported for all decorator types by _import_plugin_decorators().
782
841
  """
783
842
  ...
784
843
 
785
844
  @typing.overload
786
- def pypi(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
845
+ def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
787
846
  """
788
- Specifies the PyPI packages for the step.
847
+ Specifies that the step will succeed under all circumstances.
789
848
 
790
- Information in this decorator will augment any
791
- attributes set in the `@pypi_base` flow-level decorator. Hence,
792
- you can use `@pypi_base` to set packages required by all
793
- steps and use `@pypi` to specify step-specific overrides.
849
+ The decorator will create an optional artifact, specified by `var`, which
850
+ contains the exception raised. You can use it to detect the presence
851
+ of errors, indicating that all happy-path artifacts produced by the step
852
+ are missing.
794
853
 
795
854
 
796
855
  Parameters
797
856
  ----------
798
- packages : Dict[str, str], default: {}
799
- Packages to use for this step. The key is the name of the package
800
- and the value is the version to use.
801
- python : str, optional, default: None
802
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
803
- that the version used will correspond to the version of the Python interpreter used to start the run.
857
+ var : str, optional, default None
858
+ Name of the artifact in which to store the caught exception.
859
+ If not specified, the exception is not stored.
860
+ print_exception : bool, default True
861
+ Determines whether or not the exception is printed to
862
+ stdout when caught.
804
863
  """
805
864
  ...
806
865
 
807
866
  @typing.overload
808
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
867
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
809
868
  ...
810
869
 
811
870
  @typing.overload
812
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
871
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
813
872
  ...
814
873
 
815
- def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
874
+ def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
816
875
  """
817
- Specifies the PyPI packages for the step.
876
+ Specifies that the step will succeed under all circumstances.
818
877
 
819
- Information in this decorator will augment any
820
- attributes set in the `@pypi_base` flow-level decorator. Hence,
821
- you can use `@pypi_base` to set packages required by all
822
- steps and use `@pypi` to specify step-specific overrides.
878
+ The decorator will create an optional artifact, specified by `var`, which
879
+ contains the exception raised. You can use it to detect the presence
880
+ of errors, indicating that all happy-path artifacts produced by the step
881
+ are missing.
823
882
 
824
883
 
825
884
  Parameters
826
885
  ----------
827
- packages : Dict[str, str], default: {}
828
- Packages to use for this step. The key is the name of the package
829
- and the value is the version to use.
830
- python : str, optional, default: None
831
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
832
- that the version used will correspond to the version of the Python interpreter used to start the run.
886
+ var : str, optional, default None
887
+ Name of the artifact in which to store the caught exception.
888
+ If not specified, the exception is not stored.
889
+ print_exception : bool, default True
890
+ Determines whether or not the exception is printed to
891
+ stdout when caught.
833
892
  """
834
893
  ...
835
894
 
836
895
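Since the `@catch` docstring above explains the `var` artifact only in prose, here is a hedged sketch; the flow name and the `compute_failed` artifact name are illustrative.

```
from metaflow import FlowSpec, step, catch


class CatchSketchFlow(FlowSpec):

    @catch(var="compute_failed", print_exception=True)
    @step
    def start(self):
        # If this raises, the exception is stored in self.compute_failed
        # and the run continues instead of failing.
        self.value = 1 / 0
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "compute_failed", None):
            # Happy-path artifacts (here, self.value) are missing when the step failed.
            print("start failed with:", self.compute_failed)
        else:
            print("value:", self.value)


if __name__ == "__main__":
    CatchSketchFlow()
```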
  @typing.overload
837
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
896
+ def trigger(*, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
838
897
  """
839
- Specifies a timeout for your step.
898
+ Specifies the event(s) that this flow depends on.
840
899
 
841
- This decorator is useful if this step may hang indefinitely.
900
+ ```
901
+ @trigger(event='foo')
902
+ ```
903
+ or
904
+ ```
905
+ @trigger(events=['foo', 'bar'])
906
+ ```
842
907
 
843
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
844
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
845
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
908
+ Additionally, you can specify the parameter mappings
909
+ to map event payload to Metaflow parameters for the flow.
910
+ ```
911
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
912
+ ```
913
+ or
914
+ ```
915
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
916
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
917
+ ```
846
918
 
847
- Note that all the values specified in parameters are added together so if you specify
848
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
919
+ 'parameters' can also be a list of strings and tuples like so:
920
+ ```
921
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
922
+ ```
923
+ This is equivalent to:
924
+ ```
925
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
926
+ ```
849
927
 
850
928
 
851
929
  Parameters
852
930
  ----------
853
- seconds : int, default 0
854
- Number of seconds to wait prior to timing out.
855
- minutes : int, default 0
856
- Number of minutes to wait prior to timing out.
857
- hours : int, default 0
858
- Number of hours to wait prior to timing out.
931
+ event : Union[str, Dict[str, Any]], optional, default None
932
+ Event dependency for this flow.
933
+ events : List[Union[str, Dict[str, Any]]], default []
934
+ Events dependency for this flow.
935
+ options : Dict[str, Any], default {}
936
+ Backend-specific configuration for tuning eventing behavior.
859
937
  """
860
938
  ...
861
939
 
862
940
  @typing.overload
863
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
864
- ...
865
-
866
- @typing.overload
867
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
941
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
868
942
  ...
869
943
 
870
- def timeout(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
944
+ def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}):
871
945
  """
872
- Specifies a timeout for your step.
946
+ Specifies the event(s) that this flow depends on.
873
947
 
874
- This decorator is useful if this step may hang indefinitely.
948
+ ```
949
+ @trigger(event='foo')
950
+ ```
951
+ or
952
+ ```
953
+ @trigger(events=['foo', 'bar'])
954
+ ```
875
955
 
876
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
877
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
878
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
956
+ Additionally, you can specify the parameter mappings
957
+ to map event payload to Metaflow parameters for the flow.
958
+ ```
959
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
960
+ ```
961
+ or
962
+ ```
963
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
964
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
965
+ ```
879
966
 
880
- Note that all the values specified in parameters are added together so if you specify
881
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
967
+ 'parameters' can also be a list of strings and tuples like so:
968
+ ```
969
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
970
+ ```
971
+ This is equivalent to:
972
+ ```
973
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
974
+ ```
882
975
 
883
976
 
884
977
  Parameters
885
978
  ----------
886
- seconds : int, default 0
887
- Number of seconds to wait prior to timing out.
888
- minutes : int, default 0
889
- Number of minutes to wait prior to timing out.
890
- hours : int, default 0
891
- Number of hours to wait prior to timing out.
979
+ event : Union[str, Dict[str, Any]], optional, default None
980
+ Event dependency for this flow.
981
+ events : List[Union[str, Dict[str, Any]]], default []
982
+ Events dependency for this flow.
983
+ options : Dict[str, Any], default {}
984
+ Backend-specific configuration for tuning eventing behavior.
985
+ """
986
+ ...
987
+
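To complement the snippets embedded in the docstring above, here is a hedged, self-contained sketch of an event-triggered flow with a parameter mapping. The event name `data_updated`, the payload field `alpha_value`, and the parameter name `alpha` are all hypothetical.

```
from metaflow import FlowSpec, Parameter, step, trigger


@trigger(event={"name": "data_updated", "parameters": {"alpha": "alpha_value"}})
class TriggerSketchFlow(FlowSpec):
    # When the 'data_updated' event fires, its 'alpha_value' payload field
    # is mapped onto this parameter; local runs fall back to the default.
    alpha = Parameter("alpha", default=0.5)

    @step
    def start(self):
        print("alpha:", self.alpha)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TriggerSketchFlow()
```

The trigger takes effect once the flow is deployed to an event-aware orchestrator such as Argo Workflows; a plain local `run` simply ignores it.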
988
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: str | typing.List[str], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
989
+ """
990
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
991
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
992
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
993
+ added to a flow. Adding more than one decorator will ensure that the `start` step
994
+ starts only after all sensors finish.
995
+
996
+
997
+ Parameters
998
+ ----------
999
+ timeout : int
1000
+ Time, in seconds before the task times out and fails. (Default: 3600)
1001
+ poke_interval : int
1002
+ Time in seconds that the job should wait in between each try. (Default: 60)
1003
+ mode : str
1004
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1005
+ exponential_backoff : bool
1006
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1007
+ pool : str
1008
+ The slot pool this task should run in;
1009
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1010
+ soft_fail : bool
1011
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1012
+ name : str
1013
+ Name of the sensor on Airflow
1014
+ description : str
1015
+ Description of sensor in the Airflow UI
1016
+ bucket_key : Union[str, List[str]]
1017
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1018
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1019
+ bucket_name : str
1020
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1021
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1022
+ wildcard_match : bool
1023
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1024
+ aws_conn_id : str
1025
+ A reference to the S3 connection on Airflow. (Default: None)
1026
+ verify : bool
1027
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
892
1028
  """
893
1029
  ...
894
1030
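A hedged sketch of the sensor usage described above. The bucket key, sensor name, and description are placeholders, and the omitted arguments are assumed to fall back to the defaults listed in the docstring (timeout=3600, poke_interval=60, mode="poke", and so on).

```
from metaflow import FlowSpec, step, airflow_s3_key_sensor


@airflow_s3_key_sensor(
    name="wait_for_input_marker",                        # placeholder sensor name
    description="Wait for the input marker object",      # placeholder description
    bucket_key="s3://example-bucket/input/ready.marker", # placeholder full s3:// key
)
class SensorSketchFlow(FlowSpec):

    @step
    def start(self):
        # Runs only after the S3KeySensor attached above has succeeded on Airflow.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorSketchFlow()
```

As the docstring notes, the sensor is only attached when the flow is compiled for Airflow with `python sensorsketchflow.py airflow create`.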
 
@@ -927,6 +1063,57 @@ def project(*, name: str, branch: str | None = None, production: bool = False) -
927
1063
  """
928
1064
  ...
929
1065
 
1066
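The `@project` signature surfaced in the hunk header above takes a required `name` plus optional `branch` and `production` flags. A minimal hedged sketch follows; the project name is illustrative.

```
from metaflow import FlowSpec, project, step


@project(name="example_project")  # groups deployments of this flow under one project namespace
class ProjectSketchFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectSketchFlow()
```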
+ @typing.overload
1067
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1068
+ """
1069
+ Specifies the Conda environment for all steps of the flow.
1070
+
1071
+ Use `@conda_base` to set common libraries required by all
1072
+ steps and use `@conda` to specify step-specific additions.
1073
+
1074
+
1075
+ Parameters
1076
+ ----------
1077
+ packages : Dict[str, str], default {}
1078
+ Packages to use for this flow. The key is the name of the package
1079
+ and the value is the version to use.
1080
+ libraries : Dict[str, str], default {}
1081
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1082
+ python : str, optional, default None
1083
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1084
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1085
+ disabled : bool, default False
1086
+ If set to True, disables Conda.
1087
+ """
1088
+ ...
1089
+
1090
+ @typing.overload
1091
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1092
+ ...
1093
+
1094
+ def conda_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
1095
+ """
1096
+ Specifies the Conda environment for all steps of the flow.
1097
+
1098
+ Use `@conda_base` to set common libraries required by all
1099
+ steps and use `@conda` to specify step-specific additions.
1100
+
1101
+
1102
+ Parameters
1103
+ ----------
1104
+ packages : Dict[str, str], default {}
1105
+ Packages to use for this flow. The key is the name of the package
1106
+ and the value is the version to use.
1107
+ libraries : Dict[str, str], default {}
1108
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1109
+ python : str, optional, default None
1110
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1111
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1112
+ disabled : bool, default False
1113
+ If set to True, disables Conda.
1114
+ """
1115
+ ...
1116
+
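Building on the `@conda_base` docstring above (which points to `@conda` for step-specific additions), here is a hedged sketch; the package names and version strings are placeholders, not tested pins.

```
from metaflow import FlowSpec, conda, conda_base, step


@conda_base(python="3.10.13", packages={"pandas": "2.1.4"})  # placeholder versions
class CondaSketchFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the flow-level Conda environment
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.train)

    @conda(packages={"scikit-learn": "1.4.0"})  # step-specific addition, placeholder version
    @step
    def train(self):
        import sklearn  # noqa: F401 -- available only in this step's environment
        self.next(self.end)

    @step
    def end(self):
        print("rows:", self.rows)


if __name__ == "__main__":
    CondaSketchFlow()
```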
930
1117
  @typing.overload
931
1118
  def pypi_base(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
932
1119
  """
@@ -1112,100 +1299,6 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1112
1299
  """
1113
1300
  ...
1114
1301
 
1115
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: str | typing.List[str], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1116
- """
1117
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1118
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1119
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1120
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1121
- starts only after all sensors finish.
1122
-
1123
-
1124
- Parameters
1125
- ----------
1126
- timeout : int
1127
- Time, in seconds before the task times out and fails. (Default: 3600)
1128
- poke_interval : int
1129
- Time in seconds that the job should wait in between each try. (Default: 60)
1130
- mode : str
1131
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1132
- exponential_backoff : bool
1133
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1134
- pool : str
1135
- the slot pool this task should run in,
1136
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1137
- soft_fail : bool
1138
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1139
- name : str
1140
- Name of the sensor on Airflow
1141
- description : str
1142
- Description of sensor in the Airflow UI
1143
- bucket_key : Union[str, List[str]]
1144
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1145
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1146
- bucket_name : str
1147
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1148
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1149
- wildcard_match : bool
1150
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1151
- aws_conn_id : str
1152
- a reference to the s3 connection on Airflow. (Default: None)
1153
- verify : bool
1154
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1155
- """
1156
- ...
1157
-
1158
- @typing.overload
1159
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1160
- """
1161
- Specifies the Conda environment for all steps of the flow.
1162
-
1163
- Use `@conda_base` to set common libraries required by all
1164
- steps and use `@conda` to specify step-specific additions.
1165
-
1166
-
1167
- Parameters
1168
- ----------
1169
- packages : Dict[str, str], default {}
1170
- Packages to use for this flow. The key is the name of the package
1171
- and the value is the version to use.
1172
- libraries : Dict[str, str], default {}
1173
- Supported for backward compatibility. When used with packages, packages will take precedence.
1174
- python : str, optional, default None
1175
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1176
- that the version used will correspond to the version of the Python interpreter used to start the run.
1177
- disabled : bool, default False
1178
- If set to True, disables Conda.
1179
- """
1180
- ...
1181
-
1182
- @typing.overload
1183
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1184
- ...
1185
-
1186
- def conda_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
1187
- """
1188
- Specifies the Conda environment for all steps of the flow.
1189
-
1190
- Use `@conda_base` to set common libraries required by all
1191
- steps and use `@conda` to specify step-specific additions.
1192
-
1193
-
1194
- Parameters
1195
- ----------
1196
- packages : Dict[str, str], default {}
1197
- Packages to use for this flow. The key is the name of the package
1198
- and the value is the version to use.
1199
- libraries : Dict[str, str], default {}
1200
- Supported for backward compatibility. When used with packages, packages will take precedence.
1201
- python : str, optional, default None
1202
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1203
- that the version used will correspond to the version of the Python interpreter used to start the run.
1204
- disabled : bool, default False
1205
- If set to True, disables Conda.
1206
- """
1207
- ...
1208
-
1209
1302
  @typing.overload
1210
1303
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: str | None = None, timezone: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1211
1304
  """
@@ -1257,96 +1350,3 @@ def schedule(f: typing.Type[~FlowSpecDerived] | None = None, *, hourly: bool = F
1257
1350
  """
1258
1351
  ...
1259
1352
 
1260
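The `@schedule` signature visible above accepts `hourly`/`daily`/`weekly` flags or an explicit `cron` string with an optional `timezone`. A hedged sketch using the simple flag form (the flow name is illustrative):

```
from metaflow import FlowSpec, schedule, step


@schedule(daily=True)  # could also be hourly=True, weekly=True, or an explicit cron string
class ScheduleSketchFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScheduleSketchFlow()
```

Like the other flow-level deployment decorators here, the schedule applies when the flow is deployed to a production orchestrator rather than to local `run` invocations.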
- @typing.overload
1261
- def trigger(*, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1262
- """
1263
- Specifies the event(s) that this flow depends on.
1264
-
1265
- ```
1266
- @trigger(event='foo')
1267
- ```
1268
- or
1269
- ```
1270
- @trigger(events=['foo', 'bar'])
1271
- ```
1272
-
1273
- Additionally, you can specify the parameter mappings
1274
- to map event payload to Metaflow parameters for the flow.
1275
- ```
1276
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1277
- ```
1278
- or
1279
- ```
1280
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1281
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1282
- ```
1283
-
1284
- 'parameters' can also be a list of strings and tuples like so:
1285
- ```
1286
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1287
- ```
1288
- This is equivalent to:
1289
- ```
1290
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1291
- ```
1292
-
1293
-
1294
- Parameters
1295
- ----------
1296
- event : Union[str, Dict[str, Any]], optional, default None
1297
- Event dependency for this flow.
1298
- events : List[Union[str, Dict[str, Any]]], default []
1299
- Events dependency for this flow.
1300
- options : Dict[str, Any], default {}
1301
- Backend-specific configuration for tuning eventing behavior.
1302
- """
1303
- ...
1304
-
1305
- @typing.overload
1306
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1307
- ...
1308
-
1309
- def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}):
1310
- """
1311
- Specifies the event(s) that this flow depends on.
1312
-
1313
- ```
1314
- @trigger(event='foo')
1315
- ```
1316
- or
1317
- ```
1318
- @trigger(events=['foo', 'bar'])
1319
- ```
1320
-
1321
- Additionally, you can specify the parameter mappings
1322
- to map event payload to Metaflow parameters for the flow.
1323
- ```
1324
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1325
- ```
1326
- or
1327
- ```
1328
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1329
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1330
- ```
1331
-
1332
- 'parameters' can also be a list of strings and tuples like so:
1333
- ```
1334
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1335
- ```
1336
- This is equivalent to:
1337
- ```
1338
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1339
- ```
1340
-
1341
-
1342
- Parameters
1343
- ----------
1344
- event : Union[str, Dict[str, Any]], optional, default None
1345
- Event dependency for this flow.
1346
- events : List[Union[str, Dict[str, Any]]], default []
1347
- Events dependency for this flow.
1348
- options : Dict[str, Any], default {}
1349
- Backend-specific configuration for tuning eventing behavior.
1350
- """
1351
- ...
1352
-