metaflow-stubs 2.19.2__py2.py3-none-any.whl → 2.19.3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of metaflow-stubs might be problematic.

Files changed (168)
  1. metaflow-stubs/__init__.pyi +629 -629
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +4 -4
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +25 -25
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  24. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +7 -7
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  29. metaflow-stubs/parameters.pyi +3 -3
  30. metaflow-stubs/plugins/__init__.pyi +14 -14
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  47. metaflow-stubs/plugins/aws/__init__.pyi +4 -4
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  85. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  86. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  87. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  88. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  94. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  95. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  96. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  97. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  102. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  105. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  108. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  116. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  117. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/parsers.pyi +2 -2
  119. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  121. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  123. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  126. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  127. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  130. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  131. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  133. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  134. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  135. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  136. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  137. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  139. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  140. metaflow-stubs/pylint_wrapper.pyi +2 -2
  141. metaflow-stubs/runner/__init__.pyi +2 -2
  142. metaflow-stubs/runner/deployer.pyi +4 -4
  143. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  144. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  145. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  146. metaflow-stubs/runner/nbrun.pyi +2 -2
  147. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  148. metaflow-stubs/runner/utils.pyi +3 -3
  149. metaflow-stubs/system/__init__.pyi +2 -2
  150. metaflow-stubs/system/system_logger.pyi +2 -2
  151. metaflow-stubs/system/system_monitor.pyi +2 -2
  152. metaflow-stubs/tagging_util.pyi +2 -2
  153. metaflow-stubs/tuple_util.pyi +2 -2
  154. metaflow-stubs/user_configs/__init__.pyi +2 -2
  155. metaflow-stubs/user_configs/config_options.pyi +2 -2
  156. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  157. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  158. metaflow-stubs/user_decorators/common.pyi +2 -2
  159. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  160. metaflow-stubs/user_decorators/mutable_step.pyi +3 -3
  161. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  162. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  163. metaflow-stubs/version.pyi +2 -2
  164. {metaflow_stubs-2.19.2.dist-info → metaflow_stubs-2.19.3.dist-info}/METADATA +2 -2
  165. metaflow_stubs-2.19.3.dist-info/RECORD +168 -0
  166. metaflow_stubs-2.19.2.dist-info/RECORD +0 -168
  167. {metaflow_stubs-2.19.2.dist-info → metaflow_stubs-2.19.3.dist-info}/WHEEL +0 -0
  168. {metaflow_stubs-2.19.2.dist-info → metaflow_stubs-2.19.3.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.19.2 #
-# Generated on 2025-10-28T11:13:58.765115 #
+# MF version: 2.19.3 #
+# Generated on 2025-10-28T12:26:25.237849 #
 ######################################################################################################
 
 from __future__ import annotations
 
 import typing
 if typing.TYPE_CHECKING:
-    import datetime
     import typing
+    import datetime
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
 
@@ -47,10 +47,10 @@ from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
-from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
-from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
 from .plugins.parsers import yaml_parser as yaml_parser
+from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
 from . import cards as cards
 from . import client as client
 from .client.core import namespace as namespace
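
For orientation, not part of the diff: the reordered re-exports above are the dependency-file parsers that plug into Metaflow's `Config` mechanism. A minimal sketch of the usual pairing follows; the flow name and `requirements.txt` file are assumptions, and the parser output shape (a dict with a `packages` key, plus `python` when pinned) is taken from Metaflow's documented behavior.

```
from metaflow import Config, FlowSpec, config_expr, pypi_base, requirements_txt_parser, step

@pypi_base(packages=config_expr("deps.packages"))
class ParserDemoFlow(FlowSpec):
    # Config reads requirements.txt and passes its text through the parser;
    # assumed result shape: {"packages": {...}} and optionally {"python": ...}.
    deps = Config("deps", default="requirements.txt", parser=requirements_txt_parser)

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ParserDemoFlow()
```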
@@ -155,279 +155,325 @@ def step(f: typing.Callable[[~FlowSpecDerived], NoneType] | typing.Callable[[~Fl
     ...
 
 @typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
     """
-    Specifies environment variables to be set prior to the execution of a step.
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
 
 
     Parameters
     ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...
 
 @typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
+def conda(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
     """
-    Specifies environment variables to be set prior to the execution of a step.
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
 
 
     Parameters
     ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
    """
     ...
 
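For orientation, not part of the diff: a minimal sketch of the `@conda`/`@conda_base` pairing the docstring above describes, with `@conda_base` providing flow-wide packages and `@conda` a step-specific override. The flow name and version pins are illustrative.

```
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python="3.11.0", packages={"pandas": "2.2.2"})  # shared by all steps
class CondaDemoFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.train)

    @conda(packages={"scikit-learn": "1.5.0"})  # step-specific addition
    @step
    def train(self):
        import sklearn  # resolved from this step's Conda environment
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaDemoFlow()
```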
 @typing.overload
-def resources(*, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
     """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
 
 
     Parameters
     ----------
     cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, optional, default None
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
     memory : int, default 4096
-        Memory size (in MB) required for this step.
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
     shared_memory : int, optional, default None
         The value for the size (in MiB) of the /dev/shm volume for this step.
         This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    aws_batch_tags: Dict[str, str], optional, default None
+        Sets arbitrary AWS tags on the AWS Batch compute environment.
+        Set as string key-value pairs.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+        This is only relevant for Fargate compute environments
+    log_driver: str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options: List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example: [`awslogs-group:aws/batch/job`]
     """
     ...
 
 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None):
+def batch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None):
     """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
 
 
     Parameters
     ----------
     cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, optional, default None
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
     memory : int, default 4096
-        Memory size (in MB) required for this step.
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
     shared_memory : int, optional, default None
         The value for the size (in MiB) of the /dev/shm volume for this step.
         This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    aws_batch_tags: Dict[str, str], optional, default None
+        Sets arbitrary AWS tags on the AWS Batch compute environment.
+        Set as string key-value pairs.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+        This is only relevant for Fargate compute environments
+    log_driver: str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options: List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example: [`awslogs-group:aws/batch/job`]
     """
     ...
 
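For orientation, not part of the diff: a minimal sketch of `@batch` as documented above, layered over `@resources` so that Batch receives the larger of the two requests, per the docstring's maximum-value rule. The queue name and image are placeholders.

```
from metaflow import FlowSpec, batch, resources, step

class BatchDemoFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.crunch)

    @resources(cpu=2, memory=8192)
    @batch(queue="my-batch-queue", image="python:3.11")  # hypothetical queue/image
    @step
    def crunch(self):
        # Runs in an AWS Batch container with max(cpu, memory) of both decorators.
        self.answer = 42
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    BatchDemoFlow()
```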
 @typing.overload
-def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
-
-
-    Parameters
-    ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...
 
 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-@typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
     ...
 
-def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
+@typing.overload
+def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
     """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
-
-
-    Parameters
-    ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
-    """
-    ...
-
-@typing.overload
-def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
-    """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
 
 
     Parameters
     ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    role : str, optional, default: None
+        Role to use for fetching secrets
     """
     ...
 
 @typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def retry(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, times: int = 3, minutes_between_retries: int = 2):
+def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
     """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
 
 
     Parameters
     ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    role : str, optional, default: None
+        Role to use for fetching secrets
     """
     ...
 
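For orientation, not part of the diff: a minimal sketch of `@secrets` as documented above. The secret source name and the environment variable it yields are hypothetical; each key in the fetched secret becomes an environment variable inside the step.

```
import os
from metaflow import FlowSpec, secrets, step

class SecretsDemoFlow(FlowSpec):
    @secrets(sources=["metaflow-example-db-credentials"])  # hypothetical secret id
    @step
    def start(self):
        # Keys of the secret are injected as environment variables.
        print("db user:", os.environ.get("DB_USER"))  # assumed key in the secret
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsDemoFlow()
```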
 @typing.overload
-def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
     """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+    Specifies that the step will success under all circumstances.
 
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
 
     Parameters
     ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...
 
 @typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def timeout(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
     """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+    Specifies that the step will success under all circumstances.
 
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
 
     Parameters
     ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...
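For orientation, not part of the diff: a minimal sketch combining `@catch` (documented above) with `@retry` (whose docs appear further down in this diff). After retries are exhausted, the exception is stored in the artifact named by `var` and the flow continues, as the docstring describes. The flow name and the deliberate failure are illustrative.

```
from metaflow import FlowSpec, catch, retry, step

class CatchRetryDemoFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.compute)

    @catch(var="compute_failed", print_exception=True)
    @retry(times=2, minutes_between_retries=0)
    @step
    def compute(self):
        self.x = 1 / 0  # always fails; @retry re-runs it, then @catch takes over
        self.next(self.end)

    @step
    def end(self):
        # The artifact named by `var` holds the caught exception, if any.
        print("failure:", getattr(self, "compute_failed", None))

if __name__ == "__main__":
    CatchRetryDemoFlow()
```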
 
@@ -521,306 +567,169 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: st
     ...
 
 @typing.overload
-def pypi(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+def resources(*, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
     """
-    Specifies the PyPI packages for the step.
+    Specifies the resources needed when executing this step.
 
-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
 
 
     Parameters
     ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, optional, default None
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...
 
 @typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
    ...
 
-def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
+def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None):
     """
-    Specifies the PyPI packages for the step.
+    Specifies the resources needed when executing this step.
 
-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
 
 
     Parameters
     ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, optional, default None
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...
 
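For orientation, not part of the diff: a minimal sketch of `@resources` as documented above. The requirements are declared once and the compute layer is chosen at run time, e.g. `python myflow.py run --with batch` or `--with kubernetes`. The flow name and numbers are illustrative.

```
from metaflow import FlowSpec, resources, step

class ResourcesDemoFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.train)

    @resources(cpu=4, memory=16384, gpu=1)  # honored by whichever compute layer runs it
    @step
    def train(self):
        self.model = "trained"
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ResourcesDemoFlow()
```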
 @typing.overload
-def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
     """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
 
 
     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    aws_batch_tags: Dict[str, str], optional, default None
-        Sets arbitrary AWS tags on the AWS Batch compute environment.
-        Set as string key-value pairs.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...
 
 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def batch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None):
-    """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-
-    Parameters
-    ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    aws_batch_tags: Dict[str, str], optional, default None
-        Sets arbitrary AWS tags on the AWS Batch compute environment.
-        Set as string key-value pairs.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
722
- log_options: List[str], optional, default None
723
- List of strings containing options for the chosen log driver. The configurable values
724
- depend on the `log driver` chosen. Validation of these options is not supported yet.
725
- Example: [`awslogs-group:aws/batch/job`]
726
- """
677
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
727
678
  ...
728
679
 
729
- @typing.overload
730
- def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
680
+ def retry(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, times: int = 3, minutes_between_retries: int = 2):
731
681
  """
732
- Specifies secrets to be retrieved and injected as environment variables prior to
733
- the execution of a step.
682
+ Specifies the number of times the task corresponding
683
+ to a step needs to be retried.
734
684
 
685
+ This decorator is useful for handling transient errors, such as networking issues.
686
+ If your task contains operations that can't be retried safely, e.g. database updates,
687
+ it is advisable to annotate it with `@retry(times=0)`.
735
688
 
736
- Parameters
737
- ----------
738
- sources : List[Union[str, Dict[str, Any]]], default: []
739
- List of secret specs, defining how the secrets are to be retrieved
740
- role : str, optional, default: None
741
- Role to use for fetching secrets
742
- """
743
- ...
744
-
745
- @typing.overload
746
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
747
- ...
748
-
749
- @typing.overload
750
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
751
- ...
752
-
753
- def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
754
- """
755
- Specifies secrets to be retrieved and injected as environment variables prior to
756
- the execution of a step.
689
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
690
+ decorator will execute a no-op task after all retries have been exhausted,
691
+ ensuring that the flow execution can continue.
757
692
 
758
693
 
759
694
  Parameters
760
695
  ----------
761
- sources : List[Union[str, Dict[str, Any]]], default: []
762
- List of secret specs, defining how the secrets are to be retrieved
763
- role : str, optional, default: None
764
- Role to use for fetching secrets
696
+ times : int, default 3
697
+ Number of times to retry this task.
698
+ minutes_between_retries : int, default 2
699
+ Number of minutes between retries.
765
700
  """
766
701
  ...
767
702
 
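For orientation, here is a minimal sketch of how the `@retry` decorator stubbed above is applied from user code; the flow and step names are illustrative and not part of this package:

```
from metaflow import FlowSpec, retry, step

class RetryExampleFlow(FlowSpec):

    # Retry transient failures up to 3 times, waiting 2 minutes between tries.
    # Steps with unsafe side effects (e.g. database updates) should instead
    # use @retry(times=0), per the docstring above.
    @retry(times=3, minutes_between_retries=2)
    @step
    def start(self):
        # A flaky operation, such as a network call, would go here.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    RetryExampleFlow()
```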
  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
+ def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

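A minimal sketch of the `@environment` decorator stubbed above; the variable name and value are illustrative:

```
import os

from metaflow import FlowSpec, environment, step

class EnvironmentExampleFlow(FlowSpec):

    @environment(vars={"APP_MODE": "debug"})  # set before the step executes
    @step
    def start(self):
        # The variable is visible inside the task, including on remote compute.
        print("APP_MODE =", os.environ["APP_MODE"])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvironmentExampleFlow()
```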
@@ -874,122 +783,147 @@ def card(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generat
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

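A minimal sketch of the `@pypi` decorator stubbed above; the package and Python version pins are illustrative, not recommendations:

```
from metaflow import FlowSpec, pypi, step

class PypiExampleFlow(FlowSpec):

    @pypi(packages={"requests": "2.31.0"}, python="3.10.11")  # illustrative pins
    @step
    def start(self):
        import requests  # resolved from the step-specific PyPI environment
        print(requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiExampleFlow()
```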
  @typing.overload
- def trigger_on_finish(*, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
+ Specifies a timeout for your step.

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
+ This decorator is useful if this step may hang indefinitely.

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def trigger_on_finish(f: typing.Type[~FlowSpecDerived] | None = None, *, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}):
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies a timeout for your step.

- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
+ This decorator is useful if this step may hang indefinitely.

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
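A minimal sketch of the `@timeout` decorator stubbed above, combined with `@retry` and `@catch` as the docstring suggests; names and values are illustrative:

```
from metaflow import FlowSpec, catch, retry, step, timeout

class TimeoutExampleFlow(FlowSpec):

    @catch(var="failure")         # absorbs the error once retries run out
    @retry(times=2)               # a timeout counts as a retryable exception
    @timeout(hours=1, minutes=1)  # units add up: effective limit is 1h 1m
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TimeoutExampleFlow()
```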
+ def project(*, name: str, branch: str | None = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
  """
  ...

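A minimal sketch of the flow-level `@project` decorator stubbed above; the project name is illustrative:

```
from metaflow import FlowSpec, project, step

@project(name="demo_project")  # flows sharing this name share a namespace
class ProjectBranchFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectBranchFlow()
```

Following the branch rules in the docstring, running this flow with `--branch my_branch` would place it in `test.my_branch`, while `--production` alone would place it in `prod`.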
@@ -1035,130 +969,103 @@ def pypi_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typin
  ...

  @typing.overload
- def trigger(*, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+

  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
  """
  ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Type[~FlowSpecDerived] | None = None, *, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`

- Parameters
- ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- def project(*, name: str, branch: str | None = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
+
+ Parameters
+ ----------
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
@@ -1205,6 +1112,100 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...

+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: str | typing.List[str], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorator. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
+ """
+ ...
+
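A sketch of the `@airflow_s3_key_sensor` decorator documented above. The stub lists every argument as keyword-only, so all are spelled out here with the documented defaults; the bucket, key, and sensor name are illustrative:

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Only takes effect when the flow is compiled for Airflow via `airflow create`.
@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name="wait_for_input",
    description="Wait for the daily input file",
    bucket_key="s3://my-bucket/daily/input.csv",  # full s3:// url
    bucket_name=None,  # not needed with a full s3:// url
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()
```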
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
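A minimal sketch of the flow-level `@conda_base` decorator documented above, combined with a step-level `@conda` addition as the docstring suggests; all package pins are illustrative:

```
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(packages={"pandas": "2.1.4"}, python="3.10.11")  # illustrative pins
class CondaBaseExampleFlow(FlowSpec):

    @conda(packages={"pyarrow": "14.0.2"})  # step-specific addition
    @step
    def start(self):
        import pandas   # from the flow-level environment
        import pyarrow  # from the step-level addition
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaBaseExampleFlow()
```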
  @typing.overload
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: str | None = None, timezone: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1257,96 +1258,95 @@ def schedule(f: typing.Type[~FlowSpecDerived] | None = None, *, hourly: bool = F
  ...

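The `@schedule` docstring is elided by this hunk, but the signature above shows the accepted parameters; a sketch of a cron-based schedule, with an illustrative expression:

```
from metaflow import FlowSpec, schedule, step

@schedule(cron="0 6 * * *", timezone="UTC")  # illustrative: daily at 06:00 UTC
class ScheduledFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScheduledFlow()
```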
  @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies the event(s) that this flow depends on.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...

  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def conda_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
+ def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies the event(s) that this flow depends on.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```

+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ ```

- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: str | typing.List[str], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```


  Parameters
  ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
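A minimal sketch of the flow-level `@trigger` decorator documented above, mapping an event payload field to a flow parameter; the event name, payload field, and flow are illustrative:

```
from metaflow import FlowSpec, Parameter, step, trigger

@trigger(event={"name": "data_updated",
                "parameters": {"table": "table_name"}})  # illustrative mapping
class EventDrivenFlow(FlowSpec):

    # Populated from the event payload field 'table_name' when triggered.
    table = Parameter("table", default="raw_events")

    @step
    def start(self):
        print("processing table:", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EventDrivenFlow()
```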