metaflow-stubs 2.17.1__py2.py3-none-any.whl → 2.17.3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of metaflow-stubs might be problematic. Click here for more details.

Files changed (166)
  1. metaflow-stubs/__init__.pyi +641 -641
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +13 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +24 -24
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  24. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  29. metaflow-stubs/parameters.pyi +3 -3
  30. metaflow-stubs/plugins/__init__.pyi +14 -14
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +7 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +5 -5
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +5 -5
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +5 -5
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  135. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +34 -34
  141. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  142. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  146. metaflow-stubs/runner/utils.pyi +3 -3
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +2 -2
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -2
  153. metaflow-stubs/user_configs/config_options.pyi +3 -3
  154. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  155. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  156. metaflow-stubs/user_decorators/common.pyi +2 -2
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  158. metaflow-stubs/user_decorators/mutable_step.pyi +6 -6
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +19 -6
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.17.1.dist-info → metaflow_stubs-2.17.3.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.17.3.dist-info/RECORD +166 -0
  164. metaflow_stubs-2.17.1.dist-info/RECORD +0 -166
  165. {metaflow_stubs-2.17.1.dist-info → metaflow_stubs-2.17.3.dist-info}/WHEEL +0 -0
  166. {metaflow_stubs-2.17.1.dist-info → metaflow_stubs-2.17.3.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.17.1 #
4
- # Generated on 2025-08-11T22:07:35.926648 #
3
+ # MF version: 2.17.3 #
4
+ # Generated on 2025-08-21T22:44:50.438086 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import typing
12
11
  import datetime
12
+ import typing
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -40,16 +40,16 @@ from .user_decorators.user_step_decorator import StepMutator as StepMutator
40
40
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
41
41
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
42
42
  from . import metaflow_git as metaflow_git
43
- from . import events as events
44
43
  from . import tuple_util as tuple_util
44
+ from . import events as events
45
45
  from . import runner as runner
46
46
  from . import plugins as plugins
47
47
  from .plugins.datatools.s3.s3 import S3 as S3
48
48
  from . import includefile as includefile
49
49
  from .includefile import IncludeFile as IncludeFile
50
50
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
51
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
52
51
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
52
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
53
53
  from . import cards as cards
54
54
  from . import client as client
55
55
  from .client.core import namespace as namespace
@@ -153,41 +153,131 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
153
153
  ...
154
154
 
155
155
  @typing.overload
156
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
156
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
157
157
  """
158
- Specifies secrets to be retrieved and injected as environment variables prior to
159
- the execution of a step.
158
  - Specifies that the step will succeed under all circumstances.
159
+
160
+ The decorator will create an optional artifact, specified by `var`, which
161
+ contains the exception raised. You can use it to detect the presence
162
+ of errors, indicating that all happy-path artifacts produced by the step
163
+ are missing.
160
164
 
161
165
 
162
166
  Parameters
163
167
  ----------
164
- sources : List[Union[str, Dict[str, Any]]], default: []
165
- List of secret specs, defining how the secrets are to be retrieved
166
- role : str, optional, default: None
167
- Role to use for fetching secrets
168
+ var : str, optional, default None
169
+ Name of the artifact in which to store the caught exception.
170
+ If not specified, the exception is not stored.
171
+ print_exception : bool, default True
172
+ Determines whether or not the exception is printed to
173
+ stdout when caught.
168
174
  """
169
175
  ...
170
176
 
171
177
  @typing.overload
172
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
178
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
173
179
  ...
174
180
 
175
181
  @typing.overload
176
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
182
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
177
183
  ...
178
184
 
179
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
185
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
180
186
  """
181
- Specifies secrets to be retrieved and injected as environment variables prior to
182
- the execution of a step.
187
  + Specifies that the step will succeed under all circumstances.
188
+
189
+ The decorator will create an optional artifact, specified by `var`, which
190
+ contains the exception raised. You can use it to detect the presence
191
+ of errors, indicating that all happy-path artifacts produced by the step
192
+ are missing.
183
193
 
184
194
 
185
195
  Parameters
186
196
  ----------
187
- sources : List[Union[str, Dict[str, Any]]], default: []
188
- List of secret specs, defining how the secrets are to be retrieved
189
- role : str, optional, default: None
190
- Role to use for fetching secrets
197
+ var : str, optional, default None
198
+ Name of the artifact in which to store the caught exception.
199
+ If not specified, the exception is not stored.
200
+ print_exception : bool, default True
201
+ Determines whether or not the exception is printed to
202
+ stdout when caught.
203
+ """
204
+ ...
205
+
206
+ @typing.overload
207
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
208
+ """
209
+ Specifies a timeout for your step.
210
+
211
+ This decorator is useful if this step may hang indefinitely.
212
+
213
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
214
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
215
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
216
+
217
+ Note that all the values specified in parameters are added together so if you specify
218
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
219
+
220
+
221
+ Parameters
222
+ ----------
223
+ seconds : int, default 0
224
+ Number of seconds to wait prior to timing out.
225
+ minutes : int, default 0
226
+ Number of minutes to wait prior to timing out.
227
+ hours : int, default 0
228
+ Number of hours to wait prior to timing out.
229
+ """
230
+ ...
231
+
232
+ @typing.overload
233
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
234
+ ...
235
+
236
+ @typing.overload
237
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
238
+ ...
239
+
240
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
241
+ """
242
+ Specifies a timeout for your step.
243
+
244
+ This decorator is useful if this step may hang indefinitely.
245
+
246
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
247
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
248
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
249
+
250
+ Note that all the values specified in parameters are added together so if you specify
251
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
252
+
253
+
254
+ Parameters
255
+ ----------
256
+ seconds : int, default 0
257
+ Number of seconds to wait prior to timing out.
258
+ minutes : int, default 0
259
+ Number of minutes to wait prior to timing out.
260
+ hours : int, default 0
261
+ Number of hours to wait prior to timing out.
262
+ """
263
+ ...
264
+
265
+ @typing.overload
266
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
267
+ """
268
+ Decorator prototype for all step decorators. This function gets specialized
269
+ and imported for all decorators types by _import_plugin_decorators().
270
+ """
271
+ ...
272
+
273
+ @typing.overload
274
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
275
+ ...
276
+
277
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
278
+ """
279
+ Decorator prototype for all step decorators. This function gets specialized
280
+ and imported for all decorators types by _import_plugin_decorators().
191
281
  """
192
282
  ...
193
283
 
@@ -251,102 +341,181 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
251
341
  ...
252
342
 
253
343
  @typing.overload
254
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
344
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
255
345
  """
256
- Specifies environment variables to be set prior to the execution of a step.
346
+ Specifies the resources needed when executing this step.
347
+
348
+ Use `@resources` to specify the resource requirements
349
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
350
+
351
+ You can choose the compute layer on the command line by executing e.g.
352
+ ```
353
+ python myflow.py run --with batch
354
+ ```
355
+ or
356
+ ```
357
+ python myflow.py run --with kubernetes
358
+ ```
359
+ which executes the flow on the desired system using the
360
+ requirements specified in `@resources`.
257
361
 
258
362
 
259
363
  Parameters
260
364
  ----------
261
- vars : Dict[str, str], default {}
262
- Dictionary of environment variables to set.
365
+ cpu : int, default 1
366
+ Number of CPUs required for this step.
367
+ gpu : int, optional, default None
368
+ Number of GPUs required for this step.
369
+ disk : int, optional, default None
370
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
371
+ memory : int, default 4096
372
+ Memory size (in MB) required for this step.
373
+ shared_memory : int, optional, default None
374
+ The value for the size (in MiB) of the /dev/shm volume for this step.
375
+ This parameter maps to the `--shm-size` option in Docker.
263
376
  """
264
377
  ...
265
378
 
266
379
  @typing.overload
267
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
380
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
268
381
  ...
269
382
 
270
383
  @typing.overload
271
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
384
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
272
385
  ...
273
386
 
274
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
387
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
275
388
  """
276
- Specifies environment variables to be set prior to the execution of a step.
389
+ Specifies the resources needed when executing this step.
390
+
391
+ Use `@resources` to specify the resource requirements
392
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
393
+
394
+ You can choose the compute layer on the command line by executing e.g.
395
+ ```
396
+ python myflow.py run --with batch
397
+ ```
398
+ or
399
+ ```
400
+ python myflow.py run --with kubernetes
401
+ ```
402
+ which executes the flow on the desired system using the
403
+ requirements specified in `@resources`.
277
404
 
278
405
 
279
406
  Parameters
280
407
  ----------
281
- vars : Dict[str, str], default {}
282
- Dictionary of environment variables to set.
408
+ cpu : int, default 1
409
+ Number of CPUs required for this step.
410
+ gpu : int, optional, default None
411
+ Number of GPUs required for this step.
412
+ disk : int, optional, default None
413
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
414
+ memory : int, default 4096
415
+ Memory size (in MB) required for this step.
416
+ shared_memory : int, optional, default None
417
+ The value for the size (in MiB) of the /dev/shm volume for this step.
418
+ This parameter maps to the `--shm-size` option in Docker.
283
419
  """
284
420
  ...
285
421
 
286
422
  @typing.overload
287
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
423
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
288
424
  """
289
- Specifies the number of times the task corresponding
290
- to a step needs to be retried.
291
-
292
- This decorator is useful for handling transient errors, such as networking issues.
293
- If your task contains operations that can't be retried safely, e.g. database updates,
294
- it is advisable to annotate it with `@retry(times=0)`.
425
+ Creates a human-readable report, a Metaflow Card, after this step completes.
295
426
 
296
- This can be used in conjunction with the `@catch` decorator. The `@catch`
297
- decorator will execute a no-op task after all retries have been exhausted,
298
- ensuring that the flow execution can continue.
427
+ Note that you may add multiple `@card` decorators in a step with different parameters.
299
428
 
300
429
 
301
430
  Parameters
302
431
  ----------
303
- times : int, default 3
304
- Number of times to retry this task.
305
- minutes_between_retries : int, default 2
306
- Number of minutes between retries.
432
+ type : str, default 'default'
433
+ Card type.
434
+ id : str, optional, default None
435
+ If multiple cards are present, use this id to identify this card.
436
+ options : Dict[str, Any], default {}
437
+ Options passed to the card. The contents depend on the card type.
438
+ timeout : int, default 45
439
+ Interrupt reporting if it takes more than this many seconds.
307
440
  """
308
441
  ...
309
442
 
310
443
  @typing.overload
311
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
444
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
312
445
  ...
313
446
 
314
447
  @typing.overload
315
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
448
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
316
449
  ...
317
450
 
318
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
451
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
319
452
  """
320
- Specifies the number of times the task corresponding
321
- to a step needs to be retried.
322
-
323
- This decorator is useful for handling transient errors, such as networking issues.
324
- If your task contains operations that can't be retried safely, e.g. database updates,
325
- it is advisable to annotate it with `@retry(times=0)`.
453
+ Creates a human-readable report, a Metaflow Card, after this step completes.
326
454
 
327
- This can be used in conjunction with the `@catch` decorator. The `@catch`
328
- decorator will execute a no-op task after all retries have been exhausted,
329
- ensuring that the flow execution can continue.
455
+ Note that you may add multiple `@card` decorators in a step with different parameters.
330
456
 
331
457
 
332
458
  Parameters
333
459
  ----------
334
- times : int, default 3
335
- Number of times to retry this task.
336
- minutes_between_retries : int, default 2
337
- Number of minutes between retries.
460
+ type : str, default 'default'
461
+ Card type.
462
+ id : str, optional, default None
463
+ If multiple cards are present, use this id to identify this card.
464
+ options : Dict[str, Any], default {}
465
+ Options passed to the card. The contents depend on the card type.
466
+ timeout : int, default 45
467
+ Interrupt reporting if it takes more than this many seconds.
338
468
  """
339
469
  ...
340
470
 
341
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
471
+ @typing.overload
472
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
342
473
  """
343
- Specifies that this step should execute on Kubernetes.
474
+ Specifies secrets to be retrieved and injected as environment variables prior to
475
+ the execution of a step.
344
476
 
345
477
 
346
478
  Parameters
347
479
  ----------
348
- cpu : int, default 1
349
- Number of CPUs required for this step. If `@resources` is
480
+ sources : List[Union[str, Dict[str, Any]]], default: []
481
+ List of secret specs, defining how the secrets are to be retrieved
482
+ role : str, optional, default: None
483
+ Role to use for fetching secrets
484
+ """
485
+ ...
486
+
487
+ @typing.overload
488
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
489
+ ...
490
+
491
+ @typing.overload
492
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
493
+ ...
494
+
495
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
496
+ """
497
+ Specifies secrets to be retrieved and injected as environment variables prior to
498
+ the execution of a step.
499
+
500
+
501
+ Parameters
502
+ ----------
503
+ sources : List[Union[str, Dict[str, Any]]], default: []
504
+ List of secret specs, defining how the secrets are to be retrieved
505
+ role : str, optional, default: None
506
+ Role to use for fetching secrets
507
+ """
508
+ ...
509
+
510
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
511
+ """
512
+ Specifies that this step should execute on Kubernetes.
513
+
514
+
515
+ Parameters
516
+ ----------
517
+ cpu : int, default 1
518
+ Number of CPUs required for this step. If `@resources` is
350
519
  also present, the maximum value from all decorators is used.
351
520
  memory : int, default 4096
352
521
  Memory size (in MB) required for this step. If
@@ -427,6 +596,155 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
427
596
  """
428
597
  ...
429
598
 
599
+ @typing.overload
600
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
601
+ """
602
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
603
+
604
+
605
+ Parameters
606
+ ----------
607
+ cpu : int, default 1
608
+ Number of CPUs required for this step. If `@resources` is
609
+ also present, the maximum value from all decorators is used.
610
+ gpu : int, default 0
611
+ Number of GPUs required for this step. If `@resources` is
612
+ also present, the maximum value from all decorators is used.
613
+ memory : int, default 4096
614
+ Memory size (in MB) required for this step. If
615
+ `@resources` is also present, the maximum value from all decorators is
616
+ used.
617
+ image : str, optional, default None
618
+ Docker image to use when launching on AWS Batch. If not specified, and
619
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
620
+ not, a default Docker image mapping to the current version of Python is used.
621
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
622
+ AWS Batch Job Queue to submit the job to.
623
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
624
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
625
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
626
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
627
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
628
+ shared_memory : int, optional, default None
629
+ The value for the size (in MiB) of the /dev/shm volume for this step.
630
+ This parameter maps to the `--shm-size` option in Docker.
631
+ max_swap : int, optional, default None
632
+ The total amount of swap memory (in MiB) a container can use for this
633
+ step. This parameter is translated to the `--memory-swap` option in
634
+ Docker where the value is the sum of the container memory plus the
635
+ `max_swap` value.
636
+ swappiness : int, optional, default None
637
+ This allows you to tune memory swappiness behavior for this step.
638
+ A swappiness value of 0 causes swapping not to happen unless absolutely
639
+ necessary. A swappiness value of 100 causes pages to be swapped very
640
+ aggressively. Accepted values are whole numbers between 0 and 100.
641
+ use_tmpfs : bool, default False
642
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
643
+ not available on Fargate compute environments
644
+ tmpfs_tempdir : bool, default True
645
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
646
+ tmpfs_size : int, optional, default None
647
+ The value for the size (in MiB) of the tmpfs mount for this step.
648
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
649
+ memory allocated for this step.
650
+ tmpfs_path : str, optional, default None
651
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
652
+ inferentia : int, default 0
653
+ Number of Inferentia chips required for this step.
654
+ trainium : int, default None
655
+ Alias for inferentia. Use only one of the two.
656
+ efa : int, default 0
657
+ Number of elastic fabric adapter network devices to attach to container
658
+ ephemeral_storage : int, default None
659
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
660
+ This is only relevant for Fargate compute environments
661
+ log_driver: str, optional, default None
662
+ The log driver to use for the Amazon ECS container.
663
+ log_options: List[str], optional, default None
664
+ List of strings containing options for the chosen log driver. The configurable values
665
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
666
+ Example: [`awslogs-group:aws/batch/job`]
667
+ """
668
+ ...
669
+
670
+ @typing.overload
671
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
672
+ ...
673
+
674
+ @typing.overload
675
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
676
+ ...
677
+
678
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
679
+ """
680
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
681
+
682
+
683
+ Parameters
684
+ ----------
685
+ cpu : int, default 1
686
+ Number of CPUs required for this step. If `@resources` is
687
+ also present, the maximum value from all decorators is used.
688
+ gpu : int, default 0
689
+ Number of GPUs required for this step. If `@resources` is
690
+ also present, the maximum value from all decorators is used.
691
+ memory : int, default 4096
692
+ Memory size (in MB) required for this step. If
693
+ `@resources` is also present, the maximum value from all decorators is
694
+ used.
695
+ image : str, optional, default None
696
+ Docker image to use when launching on AWS Batch. If not specified, and
697
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
698
+ not, a default Docker image mapping to the current version of Python is used.
699
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
700
+ AWS Batch Job Queue to submit the job to.
701
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
702
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
703
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
704
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
705
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
706
+ shared_memory : int, optional, default None
707
+ The value for the size (in MiB) of the /dev/shm volume for this step.
708
+ This parameter maps to the `--shm-size` option in Docker.
709
+ max_swap : int, optional, default None
710
+ The total amount of swap memory (in MiB) a container can use for this
711
+ step. This parameter is translated to the `--memory-swap` option in
712
+ Docker where the value is the sum of the container memory plus the
713
+ `max_swap` value.
714
+ swappiness : int, optional, default None
715
+ This allows you to tune memory swappiness behavior for this step.
716
+ A swappiness value of 0 causes swapping not to happen unless absolutely
717
+ necessary. A swappiness value of 100 causes pages to be swapped very
718
+ aggressively. Accepted values are whole numbers between 0 and 100.
719
+ use_tmpfs : bool, default False
720
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
721
+ not available on Fargate compute environments
722
+ tmpfs_tempdir : bool, default True
723
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
724
+ tmpfs_size : int, optional, default None
725
+ The value for the size (in MiB) of the tmpfs mount for this step.
726
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
727
+ memory allocated for this step.
728
+ tmpfs_path : str, optional, default None
729
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
730
+ inferentia : int, default 0
731
+ Number of Inferentia chips required for this step.
732
+ trainium : int, default None
733
+ Alias for inferentia. Use only one of the two.
734
+ efa : int, default 0
735
+ Number of elastic fabric adapter network devices to attach to container
736
+ ephemeral_storage : int, default None
737
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
738
+ This is only relevant for Fargate compute environments
739
+ log_driver: str, optional, default None
740
+ The log driver to use for the Amazon ECS container.
741
+ log_options: List[str], optional, default None
742
+ List of strings containing options for the chosen log driver. The configurable values
743
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
744
+ Example: [`awslogs-group:aws/batch/job`]
745
+ """
746
+ ...
747
+
430
748
  @typing.overload
431
749
  def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
432
750
  """
@@ -479,459 +797,269 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
479
797
  ...
480
798
 
481
799
  @typing.overload
482
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
800
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
483
801
  """
484
- Creates a human-readable report, a Metaflow Card, after this step completes.
802
+ Specifies the number of times the task corresponding
803
+ to a step needs to be retried.
485
804
 
486
- Note that you may add multiple `@card` decorators in a step with different parameters.
805
+ This decorator is useful for handling transient errors, such as networking issues.
806
+ If your task contains operations that can't be retried safely, e.g. database updates,
807
+ it is advisable to annotate it with `@retry(times=0)`.
808
+
809
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
810
+ decorator will execute a no-op task after all retries have been exhausted,
811
+ ensuring that the flow execution can continue.
487
812
 
488
813
 
489
814
  Parameters
490
815
  ----------
491
- type : str, default 'default'
492
- Card type.
493
- id : str, optional, default None
494
- If multiple cards are present, use this id to identify this card.
495
- options : Dict[str, Any], default {}
496
- Options passed to the card. The contents depend on the card type.
497
- timeout : int, default 45
498
- Interrupt reporting if it takes more than this many seconds.
816
+ times : int, default 3
817
+ Number of times to retry this task.
818
+ minutes_between_retries : int, default 2
819
+ Number of minutes between retries.
499
820
  """
500
821
  ...
501
822
 
502
823
  @typing.overload
503
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
824
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
504
825
  ...
505
826
 
506
827
  @typing.overload
507
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
828
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
508
829
  ...
509
830
 
510
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
831
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
511
832
  """
512
- Creates a human-readable report, a Metaflow Card, after this step completes.
833
+ Specifies the number of times the task corresponding
834
+ to a step needs to be retried.
513
835
 
514
- Note that you may add multiple `@card` decorators in a step with different parameters.
836
+ This decorator is useful for handling transient errors, such as networking issues.
837
+ If your task contains operations that can't be retried safely, e.g. database updates,
838
+ it is advisable to annotate it with `@retry(times=0)`.
839
+
840
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
841
+ decorator will execute a no-op task after all retries have been exhausted,
842
+ ensuring that the flow execution can continue.
515
843
 
516
844
 
517
845
  Parameters
518
846
  ----------
519
- type : str, default 'default'
520
- Card type.
521
- id : str, optional, default None
522
- If multiple cards are present, use this id to identify this card.
523
- options : Dict[str, Any], default {}
524
- Options passed to the card. The contents depend on the card type.
525
- timeout : int, default 45
526
- Interrupt reporting if it takes more than this many seconds.
847
+ times : int, default 3
848
+ Number of times to retry this task.
849
+ minutes_between_retries : int, default 2
850
+ Number of minutes between retries.
527
851
  """
528
852
  ...
529
853
 
530
854
  @typing.overload
531
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
855
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
532
856
  """
533
- Decorator prototype for all step decorators. This function gets specialized
534
- and imported for all decorators types by _import_plugin_decorators().
857
+ Specifies environment variables to be set prior to the execution of a step.
858
+
859
+
860
+ Parameters
861
+ ----------
862
+ vars : Dict[str, str], default {}
863
+ Dictionary of environment variables to set.
535
864
  """
536
865
  ...
537
866
 
538
867
  @typing.overload
539
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
540
- ...
541
-
542
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
543
- """
544
- Decorator prototype for all step decorators. This function gets specialized
545
- and imported for all decorators types by _import_plugin_decorators().
546
- """
547
- ...
548
-
549
- @typing.overload
550
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
551
- """
552
- Specifies a timeout for your step.
553
-
554
- This decorator is useful if this step may hang indefinitely.
555
-
556
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
557
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
558
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
559
-
560
- Note that all the values specified in parameters are added together so if you specify
561
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
562
-
563
-
564
- Parameters
565
- ----------
566
- seconds : int, default 0
567
- Number of seconds to wait prior to timing out.
568
- minutes : int, default 0
569
- Number of minutes to wait prior to timing out.
570
- hours : int, default 0
571
- Number of hours to wait prior to timing out.
572
- """
868
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
573
869
  ...
574
870
 
575
871
  @typing.overload
576
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
577
- ...
578
-
579
- @typing.overload
580
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
872
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
581
873
  ...
582
874
 
583
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
875
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
584
876
  """
585
- Specifies a timeout for your step.
586
-
587
- This decorator is useful if this step may hang indefinitely.
588
-
589
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
590
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
591
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
592
-
593
- Note that all the values specified in parameters are added together so if you specify
594
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
877
+ Specifies environment variables to be set prior to the execution of a step.
595
878
 
596
879
 
597
880
  Parameters
598
881
  ----------
599
- seconds : int, default 0
600
- Number of seconds to wait prior to timing out.
601
- minutes : int, default 0
602
- Number of minutes to wait prior to timing out.
603
- hours : int, default 0
604
- Number of hours to wait prior to timing out.
882
+ vars : Dict[str, str], default {}
883
+ Dictionary of environment variables to set.
605
884
  """
606
885
  ...
607
886
 
608
887
  @typing.overload
609
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
888
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
610
889
  """
611
- Specifies the resources needed when executing this step.
890
+ Specifies the event(s) that this flow depends on.
612
891
 
613
- Use `@resources` to specify the resource requirements
614
- independently of the specific compute layer (`@batch`, `@kubernetes`).
892
+ ```
893
+ @trigger(event='foo')
894
+ ```
895
+ or
896
+ ```
897
+ @trigger(events=['foo', 'bar'])
898
+ ```
615
899
 
616
- You can choose the compute layer on the command line by executing e.g.
900
+ Additionally, you can specify the parameter mappings
901
+ to map event payload to Metaflow parameters for the flow.
617
902
  ```
618
- python myflow.py run --with batch
903
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
619
904
  ```
620
905
  or
621
906
  ```
622
- python myflow.py run --with kubernetes
907
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
908
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
909
+ ```
910
+
911
+ 'parameters' can also be a list of strings and tuples like so:
912
+ ```
913
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
914
+ ```
915
+ This is equivalent to:
916
+ ```
917
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
623
918
  ```
624
- which executes the flow on the desired system using the
625
- requirements specified in `@resources`.
626
919
 
627
920
 
628
921
  Parameters
629
922
  ----------
630
- cpu : int, default 1
631
- Number of CPUs required for this step.
632
- gpu : int, optional, default None
633
- Number of GPUs required for this step.
634
- disk : int, optional, default None
635
- Disk size (in MB) required for this step. Only applies on Kubernetes.
636
- memory : int, default 4096
637
- Memory size (in MB) required for this step.
638
- shared_memory : int, optional, default None
639
- The value for the size (in MiB) of the /dev/shm volume for this step.
640
- This parameter maps to the `--shm-size` option in Docker.
923
+ event : Union[str, Dict[str, Any]], optional, default None
924
+ Event dependency for this flow.
925
+ events : List[Union[str, Dict[str, Any]]], default []
926
+ Events dependency for this flow.
927
+ options : Dict[str, Any], default {}
928
+ Backend-specific configuration for tuning eventing behavior.
641
929
  """
642
930
  ...
643
931
 
644
932
  @typing.overload
645
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
646
- ...
647
-
648
- @typing.overload
649
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
933
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
650
934
  ...
651
935
 
652
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
936
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
653
937
  """
654
- Specifies the resources needed when executing this step.
655
-
656
- Use `@resources` to specify the resource requirements
657
- independently of the specific compute layer (`@batch`, `@kubernetes`).
938
+ Specifies the event(s) that this flow depends on.
658
939
 
659
- You can choose the compute layer on the command line by executing e.g.
660
940
  ```
661
- python myflow.py run --with batch
941
+ @trigger(event='foo')
662
942
  ```
663
943
  or
664
944
  ```
665
- python myflow.py run --with kubernetes
945
+ @trigger(events=['foo', 'bar'])
666
946
  ```
667
- which executes the flow on the desired system using the
668
- requirements specified in `@resources`.
669
947
 
948
+ Additionally, you can specify the parameter mappings
949
+ to map event payload to Metaflow parameters for the flow.
950
+ ```
951
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
952
+ ```
953
+ or
954
+ ```
955
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
956
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
957
+ ```
670
958
 
671
- Parameters
672
- ----------
673
- cpu : int, default 1
674
- Number of CPUs required for this step.
675
- gpu : int, optional, default None
676
- Number of GPUs required for this step.
677
- disk : int, optional, default None
678
- Disk size (in MB) required for this step. Only applies on Kubernetes.
679
- memory : int, default 4096
680
- Memory size (in MB) required for this step.
681
- shared_memory : int, optional, default None
682
- The value for the size (in MiB) of the /dev/shm volume for this step.
683
- This parameter maps to the `--shm-size` option in Docker.
684
- """
685
- ...
686
-
687
- @typing.overload
688
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
689
- """
690
- Specifies that the step will success under all circumstances.
691
-
692
- The decorator will create an optional artifact, specified by `var`, which
693
- contains the exception raised. You can use it to detect the presence
694
- of errors, indicating that all happy-path artifacts produced by the step
695
- are missing.
696
-
697
-
698
- Parameters
699
- ----------
700
- var : str, optional, default None
701
- Name of the artifact in which to store the caught exception.
702
- If not specified, the exception is not stored.
703
- print_exception : bool, default True
704
- Determines whether or not the exception is printed to
705
- stdout when caught.
706
- """
707
- ...
708
-
709
- @typing.overload
710
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
711
- ...
712
-
713
- @typing.overload
714
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
715
- ...
716
-
717
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
718
- """
719
- Specifies that the step will success under all circumstances.
720
-
721
- The decorator will create an optional artifact, specified by `var`, which
722
- contains the exception raised. You can use it to detect the presence
723
- of errors, indicating that all happy-path artifacts produced by the step
724
- are missing.
959
+ 'parameters' can also be a list of strings and tuples like so:
960
+ ```
961
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
962
+ ```
963
+ This is equivalent to:
964
+ ```
965
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
966
+ ```
725
967
 
726
968
 
727
969
  Parameters
728
970
  ----------
729
- var : str, optional, default None
730
- Name of the artifact in which to store the caught exception.
731
- If not specified, the exception is not stored.
732
- print_exception : bool, default True
733
- Determines whether or not the exception is printed to
734
- stdout when caught.
971
+ event : Union[str, Dict[str, Any]], optional, default None
972
+ Event dependency for this flow.
973
+ events : List[Union[str, Dict[str, Any]]], default []
974
+ Events dependency for this flow.
975
+ options : Dict[str, Any], default {}
976
+ Backend-specific configuration for tuning eventing behavior.
735
977
  """
736
978
  ...
737
979
 
738
980
  @typing.overload
739
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
981
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
740
982
  """
741
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
983
+ Specifies the times when the flow should be run when running on a
984
+ production scheduler.
742
985
 
743
986
 
744
987
  Parameters
745
988
  ----------
746
- cpu : int, default 1
747
- Number of CPUs required for this step. If `@resources` is
748
- also present, the maximum value from all decorators is used.
749
- gpu : int, default 0
750
- Number of GPUs required for this step. If `@resources` is
751
- also present, the maximum value from all decorators is used.
752
- memory : int, default 4096
753
- Memory size (in MB) required for this step. If
754
- `@resources` is also present, the maximum value from all decorators is
755
- used.
756
- image : str, optional, default None
757
- Docker image to use when launching on AWS Batch. If not specified, and
758
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
759
- not, a default Docker image mapping to the current version of Python is used.
760
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
761
- AWS Batch Job Queue to submit the job to.
762
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
763
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
764
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
765
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
766
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
767
- shared_memory : int, optional, default None
768
- The value for the size (in MiB) of the /dev/shm volume for this step.
769
- This parameter maps to the `--shm-size` option in Docker.
770
- max_swap : int, optional, default None
771
- The total amount of swap memory (in MiB) a container can use for this
772
- step. This parameter is translated to the `--memory-swap` option in
773
- Docker where the value is the sum of the container memory plus the
774
- `max_swap` value.
775
- swappiness : int, optional, default None
776
- This allows you to tune memory swappiness behavior for this step.
777
- A swappiness value of 0 causes swapping not to happen unless absolutely
778
- necessary. A swappiness value of 100 causes pages to be swapped very
779
- aggressively. Accepted values are whole numbers between 0 and 100.
780
- use_tmpfs : bool, default False
781
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
782
- not available on Fargate compute environments
783
- tmpfs_tempdir : bool, default True
784
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
785
- tmpfs_size : int, optional, default None
786
- The value for the size (in MiB) of the tmpfs mount for this step.
787
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
788
- memory allocated for this step.
789
- tmpfs_path : str, optional, default None
790
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
791
- inferentia : int, default 0
792
- Number of Inferentia chips required for this step.
793
- trainium : int, default None
794
- Alias for inferentia. Use only one of the two.
795
- efa : int, default 0
796
- Number of elastic fabric adapter network devices to attach to container
797
- ephemeral_storage : int, default None
798
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
799
- This is only relevant for Fargate compute environments
800
- log_driver: str, optional, default None
801
- The log driver to use for the Amazon ECS container.
802
- log_options: List[str], optional, default None
803
- List of strings containing options for the chosen log driver. The configurable values
804
- depend on the `log driver` chosen. Validation of these options is not supported yet.
805
- Example: [`awslogs-group:aws/batch/job`]
989
+ hourly : bool, default False
990
+ Run the workflow hourly.
991
+ daily : bool, default True
992
+ Run the workflow daily.
993
+ weekly : bool, default False
994
+ Run the workflow weekly.
995
+ cron : str, optional, default None
996
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
997
+ specified by this expression.
998
+ timezone : str, optional, default None
999
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1000
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
806
1001
  """
807
1002
  ...
808
1003
 
809
1004
  @typing.overload
810
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
811
- ...
812
-
813
- @typing.overload
814
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1005
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
815
1006
  ...
816
1007
 
817
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
1008
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
818
1009
  """
819
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1010
+ Specifies the times when the flow should be run when running on a
1011
+ production scheduler.
820
1012
 
821
1013
 
822
1014
  Parameters
823
1015
  ----------
824
- cpu : int, default 1
825
- Number of CPUs required for this step. If `@resources` is
826
- also present, the maximum value from all decorators is used.
827
- gpu : int, default 0
828
- Number of GPUs required for this step. If `@resources` is
829
- also present, the maximum value from all decorators is used.
830
- memory : int, default 4096
831
- Memory size (in MB) required for this step. If
832
- `@resources` is also present, the maximum value from all decorators is
833
- used.
834
- image : str, optional, default None
835
- Docker image to use when launching on AWS Batch. If not specified, and
836
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
837
- not, a default Docker image mapping to the current version of Python is used.
838
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
839
- AWS Batch Job Queue to submit the job to.
840
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
841
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
842
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
843
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
844
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
845
- shared_memory : int, optional, default None
846
- The value for the size (in MiB) of the /dev/shm volume for this step.
847
- This parameter maps to the `--shm-size` option in Docker.
848
- max_swap : int, optional, default None
849
- The total amount of swap memory (in MiB) a container can use for this
850
- step. This parameter is translated to the `--memory-swap` option in
851
- Docker where the value is the sum of the container memory plus the
852
- `max_swap` value.
853
- swappiness : int, optional, default None
854
- This allows you to tune memory swappiness behavior for this step.
855
- A swappiness value of 0 causes swapping not to happen unless absolutely
856
- necessary. A swappiness value of 100 causes pages to be swapped very
857
- aggressively. Accepted values are whole numbers between 0 and 100.
858
- use_tmpfs : bool, default False
859
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
860
- not available on Fargate compute environments
861
- tmpfs_tempdir : bool, default True
862
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
863
- tmpfs_size : int, optional, default None
864
- The value for the size (in MiB) of the tmpfs mount for this step.
865
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
866
- memory allocated for this step.
867
- tmpfs_path : str, optional, default None
868
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
869
- inferentia : int, default 0
870
- Number of Inferentia chips required for this step.
871
- trainium : int, default None
872
- Alias for inferentia. Use only one of the two.
873
- efa : int, default 0
874
- Number of elastic fabric adapter network devices to attach to container
875
- ephemeral_storage : int, default None
876
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
877
- This is only relevant for Fargate compute environments
878
- log_driver: str, optional, default None
879
- The log driver to use for the Amazon ECS container.
880
- log_options: List[str], optional, default None
881
- List of strings containing options for the chosen log driver. The configurable values
882
- depend on the `log driver` chosen. Validation of these options is not supported yet.
883
- Example: [`awslogs-group:aws/batch/job`]
1016
+ hourly : bool, default False
1017
+ Run the workflow hourly.
1018
+ daily : bool, default True
1019
+ Run the workflow daily.
1020
+ weekly : bool, default False
1021
+ Run the workflow weekly.
1022
+ cron : str, optional, default None
1023
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1024
+ specified by this expression.
1025
+ timezone : str, optional, default None
1026
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1027
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
884
1028
  """
885
1029
  ...
886
1030
 
887
- @typing.overload
888
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1031
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
889
1032
  """
890
- Specifies the Conda environment for all steps of the flow.
1033
+ Specifies what flows belong to the same project.
891
1034
 
892
- Use `@conda_base` to set common libraries required by all
893
- steps and use `@conda` to specify step-specific additions.
1035
+ A project-specific namespace is created for all flows that
1036
+ use the same `@project(name)`.
894
1037
 
895
1038
 
896
1039
  Parameters
897
1040
  ----------
898
- packages : Dict[str, str], default {}
899
- Packages to use for this flow. The key is the name of the package
900
- and the value is the version to use.
901
- libraries : Dict[str, str], default {}
902
- Supported for backward compatibility. When used with packages, packages will take precedence.
903
- python : str, optional, default None
904
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
905
- that the version used will correspond to the version of the Python interpreter used to start the run.
906
- disabled : bool, default False
907
- If set to True, disables Conda.
908
- """
909
- ...
910
-
911
- @typing.overload
912
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
913
- ...
914
-
915
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
916
- """
917
- Specifies the Conda environment for all steps of the flow.
918
-
919
- Use `@conda_base` to set common libraries required by all
920
- steps and use `@conda` to specify step-specific additions.
1041
+ name : str
1042
+ Project name. Make sure that the name is unique amongst all
1043
+ projects that use the same production scheduler. The name may
1044
+ contain only lowercase alphanumeric characters and underscores.
921
1045
 
1046
+ branch : Optional[str], default None
1047
+ The branch to use. If not specified, the branch is set to
1048
+ `user.<username>` unless `production` is set to `True`. This can
1049
+ also be set on the command line using `--branch` as a top-level option.
1050
+ It is an error to specify `branch` in the decorator and on the command line.
922
1051
 
923
- Parameters
924
- ----------
925
- packages : Dict[str, str], default {}
926
- Packages to use for this flow. The key is the name of the package
927
- and the value is the version to use.
928
- libraries : Dict[str, str], default {}
929
- Supported for backward compatibility. When used with packages, packages will take precedence.
930
- python : str, optional, default None
931
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
932
- that the version used will correspond to the version of the Python interpreter used to start the run.
933
- disabled : bool, default False
934
- If set to True, disables Conda.
1052
+ production : bool, default False
1053
+ Whether or not the branch is the production branch. This can also be set on the
1054
+ command line using `--production` as a top-level option. It is an error to specify
1055
+ `production` in the decorator and on the command line.
1056
+ The project branch name will be:
1057
+ - if `branch` is specified:
1058
+ - if `production` is True: `prod.<branch>`
1059
+ - if `production` is False: `test.<branch>`
1060
+ - if `branch` is not specified:
1061
+ - if `production` is True: `prod`
1062
+ - if `production` is False: `user.<username>`
935
1063
  """
936
1064
  ...
937
1065
 
@@ -1008,160 +1136,72 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1008
1136
  @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1009
1137
  ```
1010
1138
  or
1011
- ```
1012
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1013
- ```
1014
-
1015
- You can also specify just the project or project branch (other values will be
1016
- inferred from the current project or project branch):
1017
- ```
1018
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1019
- ```
1020
-
1021
- Note that `branch` is typically one of:
1022
- - `prod`
1023
- - `user.bob`
1024
- - `test.my_experiment`
1025
- - `prod.staging`
1026
-
1027
-
1028
- Parameters
1029
- ----------
1030
- flow : Union[str, Dict[str, str]], optional, default None
1031
- Upstream flow dependency for this flow.
1032
- flows : List[Union[str, Dict[str, str]]], default []
1033
- Upstream flow dependencies for this flow.
1034
- options : Dict[str, Any], default {}
1035
- Backend-specific configuration for tuning eventing behavior.
1036
- """
1037
- ...
1038
-
1039
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1040
- """
1041
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1042
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1043
-
1044
-
1045
- Parameters
1046
- ----------
1047
- timeout : int
1048
- Time, in seconds before the task times out and fails. (Default: 3600)
1049
- poke_interval : int
1050
- Time in seconds that the job should wait in between each try. (Default: 60)
1051
- mode : str
1052
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1053
- exponential_backoff : bool
1054
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1055
- pool : str
1056
- the slot pool this task should run in,
1057
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1058
- soft_fail : bool
1059
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1060
- name : str
1061
- Name of the sensor on Airflow
1062
- description : str
1063
- Description of sensor in the Airflow UI
1064
- external_dag_id : str
1065
- The dag_id that contains the task you want to wait for.
1066
- external_task_ids : List[str]
1067
- The list of task_ids that you want to wait for.
1068
- If None (default value) the sensor waits for the DAG. (Default: None)
1069
- allowed_states : List[str]
1070
- Iterable of allowed states, (Default: ['success'])
1071
- failed_states : List[str]
1072
- Iterable of failed or dis-allowed states. (Default: None)
1073
- execution_delta : datetime.timedelta
1074
- time difference with the previous execution to look at,
1075
- the default is the same logical date as the current task or DAG. (Default: None)
1076
- check_existence: bool
1077
- Set to True to check if the external task exists or check if
1078
- the DAG to wait for exists. (Default: True)
1079
- """
1080
- ...
1081
-
1082
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1083
- """
1084
- Specifies what flows belong to the same project.
1139
+ ```
1140
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1141
+ ```
1085
1142
 
1086
- A project-specific namespace is created for all flows that
1087
- use the same `@project(name)`.
1143
+ You can also specify just the project or project branch (other values will be
1144
+ inferred from the current project or project branch):
1145
+ ```
1146
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1147
+ ```
1148
+
1149
+ Note that `branch` is typically one of:
1150
+ - `prod`
1151
+ - `user.bob`
1152
+ - `test.my_experiment`
1153
+ - `prod.staging`
1088
1154
 
1089
1155
 
1090
1156
  Parameters
1091
1157
  ----------
1092
- name : str
1093
- Project name. Make sure that the name is unique amongst all
1094
- projects that use the same production scheduler. The name may
1095
- contain only lowercase alphanumeric characters and underscores.
1096
-
1097
- branch : Optional[str], default None
1098
- The branch to use. If not specified, the branch is set to
1099
- `user.<username>` unless `production` is set to `True`. This can
1100
- also be set on the command line using `--branch` as a top-level option.
1101
- It is an error to specify `branch` in the decorator and on the command line.
1102
-
1103
- production : bool, default False
1104
- Whether or not the branch is the production branch. This can also be set on the
1105
- command line using `--production` as a top-level option. It is an error to specify
1106
- `production` in the decorator and on the command line.
1107
- The project branch name will be:
1108
- - if `branch` is specified:
1109
- - if `production` is True: `prod.<branch>`
1110
- - if `production` is False: `test.<branch>`
1111
- - if `branch` is not specified:
1112
- - if `production` is True: `prod`
1113
- - if `production` is False: `user.<username>`
1158
+ flow : Union[str, Dict[str, str]], optional, default None
1159
+ Upstream flow dependency for this flow.
1160
+ flows : List[Union[str, Dict[str, str]]], default []
1161
+ Upstream flow dependencies for this flow.
1162
+ options : Dict[str, Any], default {}
1163
+ Backend-specific configuration for tuning eventing behavior.
1114
1164
  """
1115
1165
  ...
1116
1166
 
1117
1167
  @typing.overload
1118
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1168
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1119
1169
  """
1120
- Specifies the times when the flow should be run when running on a
1121
- production scheduler.
1170
+ Specifies the PyPI packages for all steps of the flow.
1122
1171
 
1172
+ Use `@pypi_base` to set common packages required by all
1173
+ steps and use `@pypi` to specify step-specific overrides.
1123
1174
 
1124
1175
  Parameters
1125
1176
  ----------
1126
- hourly : bool, default False
1127
- Run the workflow hourly.
1128
- daily : bool, default True
1129
- Run the workflow daily.
1130
- weekly : bool, default False
1131
- Run the workflow weekly.
1132
- cron : str, optional, default None
1133
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1134
- specified by this expression.
1135
- timezone : str, optional, default None
1136
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1137
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1177
+ packages : Dict[str, str], default: {}
1178
+ Packages to use for this flow. The key is the name of the package
1179
+ and the value is the version to use.
1180
+ python : str, optional, default: None
1181
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1182
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1138
1183
  """
1139
1184
  ...
1140
1185
 
1141
1186
  @typing.overload
1142
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1187
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1143
1188
  ...
1144
1189
 
1145
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1190
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1146
1191
  """
1147
- Specifies the times when the flow should be run when running on a
1148
- production scheduler.
1192
+ Specifies the PyPI packages for all steps of the flow.
1149
1193
 
1194
+ Use `@pypi_base` to set common packages required by all
1195
+ steps and use `@pypi` to specify step-specific overrides.
1150
1196
 
1151
1197
  Parameters
1152
1198
  ----------
1153
- hourly : bool, default False
1154
- Run the workflow hourly.
1155
- daily : bool, default True
1156
- Run the workflow daily.
1157
- weekly : bool, default False
1158
- Run the workflow weekly.
1159
- cron : str, optional, default None
1160
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1161
- specified by this expression.
1162
- timezone : str, optional, default None
1163
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1164
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1199
+ packages : Dict[str, str], default: {}
1200
+ Packages to use for this flow. The key is the name of the package
1201
+ and the value is the version to use.
1202
+ python : str, optional, default: None
1203
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1204
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1165
1205
  """
1166
1206
  ...
1167
1207
 
@@ -1209,136 +1249,96 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1209
1249
  ...
1210
1250
 
1211
1251
  @typing.overload
1212
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1252
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1213
1253
  """
1214
- Specifies the event(s) that this flow depends on.
1215
-
1216
- ```
1217
- @trigger(event='foo')
1218
- ```
1219
- or
1220
- ```
1221
- @trigger(events=['foo', 'bar'])
1222
- ```
1223
-
1224
- Additionally, you can specify the parameter mappings
1225
- to map event payload to Metaflow parameters for the flow.
1226
- ```
1227
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1228
- ```
1229
- or
1230
- ```
1231
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1232
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1233
- ```
1254
+ Specifies the Conda environment for all steps of the flow.
1234
1255
 
1235
- 'parameters' can also be a list of strings and tuples like so:
1236
- ```
1237
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1238
- ```
1239
- This is equivalent to:
1240
- ```
1241
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1242
- ```
1256
+ Use `@conda_base` to set common libraries required by all
1257
+ steps and use `@conda` to specify step-specific additions.
1243
1258
 
1244
1259
 
1245
1260
  Parameters
1246
1261
  ----------
1247
- event : Union[str, Dict[str, Any]], optional, default None
1248
- Event dependency for this flow.
1249
- events : List[Union[str, Dict[str, Any]]], default []
1250
- Events dependency for this flow.
1251
- options : Dict[str, Any], default {}
1252
- Backend-specific configuration for tuning eventing behavior.
1262
+ packages : Dict[str, str], default {}
1263
+ Packages to use for this flow. The key is the name of the package
1264
+ and the value is the version to use.
1265
+ libraries : Dict[str, str], default {}
1266
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1267
+ python : str, optional, default None
1268
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1269
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1270
+ disabled : bool, default False
1271
+ If set to True, disables Conda.
1253
1272
  """
1254
1273
  ...
1255
1274
 
1256
1275
  @typing.overload
1257
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1276
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1258
1277
  ...
1259
1278
 
1260
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1279
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1261
1280
  """
1262
- Specifies the event(s) that this flow depends on.
1263
-
1264
- ```
1265
- @trigger(event='foo')
1266
- ```
1267
- or
1268
- ```
1269
- @trigger(events=['foo', 'bar'])
1270
- ```
1271
-
1272
- Additionally, you can specify the parameter mappings
1273
- to map event payload to Metaflow parameters for the flow.
1274
- ```
1275
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1276
- ```
1277
- or
1278
- ```
1279
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1280
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1281
- ```
1282
-
1283
- 'parameters' can also be a list of strings and tuples like so:
1284
- ```
1285
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1286
- ```
1287
- This is equivalent to:
1288
- ```
1289
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1290
- ```
1291
-
1281
+ Specifies the Conda environment for all steps of the flow.
1292
1282
 
1293
- Parameters
1294
- ----------
1295
- event : Union[str, Dict[str, Any]], optional, default None
1296
- Event dependency for this flow.
1297
- events : List[Union[str, Dict[str, Any]]], default []
1298
- Events dependency for this flow.
1299
- options : Dict[str, Any], default {}
1300
- Backend-specific configuration for tuning eventing behavior.
1301
- """
1302
- ...
1303
-
1304
- @typing.overload
1305
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1306
- """
1307
- Specifies the PyPI packages for all steps of the flow.
1283
+ Use `@conda_base` to set common libraries required by all
1284
+ steps and use `@conda` to specify step-specific additions.
1308
1285
 
1309
- Use `@pypi_base` to set common packages required by all
1310
- steps and use `@pypi` to specify step-specific overrides.
1311
1286
 
1312
1287
  Parameters
1313
1288
  ----------
1314
- packages : Dict[str, str], default: {}
1289
+ packages : Dict[str, str], default {}
1315
1290
  Packages to use for this flow. The key is the name of the package
1316
1291
  and the value is the version to use.
1317
- python : str, optional, default: None
1292
+ libraries : Dict[str, str], default {}
1293
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1294
+ python : str, optional, default None
1318
1295
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1319
1296
  that the version used will correspond to the version of the Python interpreter used to start the run.
1297
+ disabled : bool, default False
1298
+ If set to True, disables Conda.
1320
1299
  """
1321
1300
  ...
1322
1301
 
1323
- @typing.overload
1324
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1325
- ...
1326
-
1327
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1302
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1328
1303
  """
1329
- Specifies the PyPI packages for all steps of the flow.
1304
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1305
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1330
1306
 
1331
- Use `@pypi_base` to set common packages required by all
1332
- steps and use `@pypi` to specify step-specific overrides.
1333
1307
 
1334
1308
  Parameters
1335
1309
  ----------
1336
- packages : Dict[str, str], default: {}
1337
- Packages to use for this flow. The key is the name of the package
1338
- and the value is the version to use.
1339
- python : str, optional, default: None
1340
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1341
- that the version used will correspond to the version of the Python interpreter used to start the run.
1310
+ timeout : int
1311
+ Time, in seconds before the task times out and fails. (Default: 3600)
1312
+ poke_interval : int
1313
+ Time in seconds that the job should wait in between each try. (Default: 60)
1314
+ mode : str
1315
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1316
+ exponential_backoff : bool
1317
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1318
+ pool : str
1319
+ the slot pool this task should run in,
1320
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1321
+ soft_fail : bool
1322
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1323
+ name : str
1324
+ Name of the sensor on Airflow
1325
+ description : str
1326
+ Description of sensor in the Airflow UI
1327
+ external_dag_id : str
1328
+ The dag_id that contains the task you want to wait for.
1329
+ external_task_ids : List[str]
1330
+ The list of task_ids that you want to wait for.
1331
+ If None (default value) the sensor waits for the DAG. (Default: None)
1332
+ allowed_states : List[str]
1333
+ Iterable of allowed states, (Default: ['success'])
1334
+ failed_states : List[str]
1335
+ Iterable of failed or dis-allowed states. (Default: None)
1336
+ execution_delta : datetime.timedelta
1337
+ time difference with the previous execution to look at,
1338
+ the default is the same logical date as the current task or DAG. (Default: None)
1339
+ check_existence: bool
1340
+ Set to True to check if the external task exists or check if
1341
+ the DAG to wait for exists. (Default: True)
1342
1342
  """
1343
1343
  ...
1344
1344