metaflow-stubs 2.17.5__py2.py3-none-any.whl → 2.18.0__py2.py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two package versions as published.
Potentially problematic release: this version of metaflow-stubs has been flagged as possibly problematic.
- metaflow-stubs/__init__.pyi +567 -567
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +6 -6
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +6 -6
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/meta_files.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +3 -3
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +17 -17
- metaflow-stubs/metaflow_git.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/packaging_sys/__init__.pyi +5 -5
- metaflow-stubs/packaging_sys/backend.pyi +4 -4
- metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
- metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
- metaflow-stubs/packaging_sys/utils.pyi +2 -2
- metaflow-stubs/packaging_sys/v1.pyi +4 -4
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +13 -13
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +4 -4
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -5
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +2 -2
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
- metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
- metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
- metaflow-stubs/plugins/secrets/utils.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/plugins/uv/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +34 -34
- metaflow-stubs/runner/deployer_impl.pyi +3 -3
- metaflow-stubs/runner/metaflow_runner.pyi +4 -4
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +4 -4
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +3 -3
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_options.pyi +4 -4
- metaflow-stubs/user_configs/config_parameters.pyi +8 -6
- metaflow-stubs/user_decorators/__init__.pyi +2 -2
- metaflow-stubs/user_decorators/common.pyi +2 -2
- metaflow-stubs/user_decorators/mutable_flow.pyi +6 -6
- metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
- metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
- metaflow-stubs/user_decorators/user_step_decorator.pyi +6 -6
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.17.5.dist-info → metaflow_stubs-2.18.0.dist-info}/METADATA +2 -2
- metaflow_stubs-2.18.0.dist-info/RECORD +166 -0
- metaflow_stubs-2.17.5.dist-info/RECORD +0 -166
- {metaflow_stubs-2.17.5.dist-info → metaflow_stubs-2.18.0.dist-info}/WHEEL +0 -0
- {metaflow_stubs-2.17.5.dist-info → metaflow_stubs-2.18.0.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
|
@@ -1,15 +1,15 @@
|
|
|
1
1
|
######################################################################################################
|
|
2
2
|
# Auto-generated Metaflow stub file #
|
|
3
|
-
# MF version: 2.
|
|
4
|
-
# Generated on 2025-08-
|
|
3
|
+
# MF version: 2.18.0 #
|
|
4
|
+
# Generated on 2025-08-27T01:57:08.613943 #
|
|
5
5
|
######################################################################################################
|
|
6
6
|
|
|
7
7
|
from __future__ import annotations
|
|
8
8
|
|
|
9
9
|
import typing
|
|
10
10
|
if typing.TYPE_CHECKING:
|
|
11
|
-
import typing
|
|
12
11
|
import datetime
|
|
12
|
+
import typing
|
|
13
13
|
FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
|
|
14
14
|
StepFlag = typing.NewType("StepFlag", bool)
|
|
15
15
|
|
|
@@ -39,16 +39,16 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
|
|
|
39
39
|
from .user_decorators.user_step_decorator import StepMutator as StepMutator
|
|
40
40
|
from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
|
|
41
41
|
from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
|
|
42
|
-
from . import tuple_util as tuple_util
|
|
43
|
-
from . import metaflow_git as metaflow_git
|
|
44
42
|
from . import events as events
|
|
43
|
+
from . import metaflow_git as metaflow_git
|
|
44
|
+
from . import tuple_util as tuple_util
|
|
45
45
|
from . import runner as runner
|
|
46
46
|
from . import plugins as plugins
|
|
47
47
|
from .plugins.datatools.s3.s3 import S3 as S3
|
|
48
48
|
from . import includefile as includefile
|
|
49
49
|
from .includefile import IncludeFile as IncludeFile
|
|
50
|
-
from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
|
|
51
50
|
from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
|
|
51
|
+
from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
|
|
52
52
|
from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
|
|
53
53
|
from . import cards as cards
|
|
54
54
|
from . import client as client
|
|
@@ -152,155 +152,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
|
|
|
152
152
|
"""
|
|
153
153
|
...
|
|
154
154
|
|
|
155
|
-
@typing.overload
|
|
156
|
-
def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
|
157
|
-
"""
|
|
158
|
-
Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
Parameters
|
|
162
|
-
----------
|
|
163
|
-
cpu : int, default 1
|
|
164
|
-
Number of CPUs required for this step. If `@resources` is
|
|
165
|
-
also present, the maximum value from all decorators is used.
|
|
166
|
-
gpu : int, default 0
|
|
167
|
-
Number of GPUs required for this step. If `@resources` is
|
|
168
|
-
also present, the maximum value from all decorators is used.
|
|
169
|
-
memory : int, default 4096
|
|
170
|
-
Memory size (in MB) required for this step. If
|
|
171
|
-
`@resources` is also present, the maximum value from all decorators is
|
|
172
|
-
used.
|
|
173
|
-
image : str, optional, default None
|
|
174
|
-
Docker image to use when launching on AWS Batch. If not specified, and
|
|
175
|
-
METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
|
|
176
|
-
not, a default Docker image mapping to the current version of Python is used.
|
|
177
|
-
queue : str, default METAFLOW_BATCH_JOB_QUEUE
|
|
178
|
-
AWS Batch Job Queue to submit the job to.
|
|
179
|
-
iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
|
|
180
|
-
AWS IAM role that AWS Batch container uses to access AWS cloud resources.
|
|
181
|
-
execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
|
|
182
|
-
AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
|
|
183
|
-
(https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
|
|
184
|
-
shared_memory : int, optional, default None
|
|
185
|
-
The value for the size (in MiB) of the /dev/shm volume for this step.
|
|
186
|
-
This parameter maps to the `--shm-size` option in Docker.
|
|
187
|
-
max_swap : int, optional, default None
|
|
188
|
-
The total amount of swap memory (in MiB) a container can use for this
|
|
189
|
-
step. This parameter is translated to the `--memory-swap` option in
|
|
190
|
-
Docker where the value is the sum of the container memory plus the
|
|
191
|
-
`max_swap` value.
|
|
192
|
-
swappiness : int, optional, default None
|
|
193
|
-
This allows you to tune memory swappiness behavior for this step.
|
|
194
|
-
A swappiness value of 0 causes swapping not to happen unless absolutely
|
|
195
|
-
necessary. A swappiness value of 100 causes pages to be swapped very
|
|
196
|
-
aggressively. Accepted values are whole numbers between 0 and 100.
|
|
197
|
-
use_tmpfs : bool, default False
|
|
198
|
-
This enables an explicit tmpfs mount for this step. Note that tmpfs is
|
|
199
|
-
not available on Fargate compute environments
|
|
200
|
-
tmpfs_tempdir : bool, default True
|
|
201
|
-
sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
|
|
202
|
-
tmpfs_size : int, optional, default None
|
|
203
|
-
The value for the size (in MiB) of the tmpfs mount for this step.
|
|
204
|
-
This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
|
|
205
|
-
memory allocated for this step.
|
|
206
|
-
tmpfs_path : str, optional, default None
|
|
207
|
-
Path to tmpfs mount for this step. Defaults to /metaflow_temp.
|
|
208
|
-
inferentia : int, default 0
|
|
209
|
-
Number of Inferentia chips required for this step.
|
|
210
|
-
trainium : int, default None
|
|
211
|
-
Alias for inferentia. Use only one of the two.
|
|
212
|
-
efa : int, default 0
|
|
213
|
-
Number of elastic fabric adapter network devices to attach to container
|
|
214
|
-
ephemeral_storage : int, default None
|
|
215
|
-
The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
|
|
216
|
-
This is only relevant for Fargate compute environments
|
|
217
|
-
log_driver: str, optional, default None
|
|
218
|
-
The log driver to use for the Amazon ECS container.
|
|
219
|
-
log_options: List[str], optional, default None
|
|
220
|
-
List of strings containing options for the chosen log driver. The configurable values
|
|
221
|
-
depend on the `log driver` chosen. Validation of these options is not supported yet.
|
|
222
|
-
Example: [`awslogs-group:aws/batch/job`]
|
|
223
|
-
"""
|
|
224
|
-
...
|
|
225
|
-
|
|
226
|
-
@typing.overload
|
|
227
|
-
def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
228
|
-
...
|
|
229
|
-
|
|
230
|
-
@typing.overload
|
|
231
|
-
def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
232
|
-
...
|
|
233
|
-
|
|
234
|
-
def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
|
|
235
|
-
"""
|
|
236
|
-
Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
Parameters
|
|
240
|
-
----------
|
|
241
|
-
cpu : int, default 1
|
|
242
|
-
Number of CPUs required for this step. If `@resources` is
|
|
243
|
-
also present, the maximum value from all decorators is used.
|
|
244
|
-
gpu : int, default 0
|
|
245
|
-
Number of GPUs required for this step. If `@resources` is
|
|
246
|
-
also present, the maximum value from all decorators is used.
|
|
247
|
-
memory : int, default 4096
|
|
248
|
-
Memory size (in MB) required for this step. If
|
|
249
|
-
`@resources` is also present, the maximum value from all decorators is
|
|
250
|
-
used.
|
|
251
|
-
image : str, optional, default None
|
|
252
|
-
Docker image to use when launching on AWS Batch. If not specified, and
|
|
253
|
-
METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
|
|
254
|
-
not, a default Docker image mapping to the current version of Python is used.
|
|
255
|
-
queue : str, default METAFLOW_BATCH_JOB_QUEUE
|
|
256
|
-
AWS Batch Job Queue to submit the job to.
|
|
257
|
-
iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
|
|
258
|
-
AWS IAM role that AWS Batch container uses to access AWS cloud resources.
|
|
259
|
-
execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
|
|
260
|
-
AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
|
|
261
|
-
(https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
|
|
262
|
-
shared_memory : int, optional, default None
|
|
263
|
-
The value for the size (in MiB) of the /dev/shm volume for this step.
|
|
264
|
-
This parameter maps to the `--shm-size` option in Docker.
|
|
265
|
-
max_swap : int, optional, default None
|
|
266
|
-
The total amount of swap memory (in MiB) a container can use for this
|
|
267
|
-
step. This parameter is translated to the `--memory-swap` option in
|
|
268
|
-
Docker where the value is the sum of the container memory plus the
|
|
269
|
-
`max_swap` value.
|
|
270
|
-
swappiness : int, optional, default None
|
|
271
|
-
This allows you to tune memory swappiness behavior for this step.
|
|
272
|
-
A swappiness value of 0 causes swapping not to happen unless absolutely
|
|
273
|
-
necessary. A swappiness value of 100 causes pages to be swapped very
|
|
274
|
-
aggressively. Accepted values are whole numbers between 0 and 100.
|
|
275
|
-
use_tmpfs : bool, default False
|
|
276
|
-
This enables an explicit tmpfs mount for this step. Note that tmpfs is
|
|
277
|
-
not available on Fargate compute environments
|
|
278
|
-
tmpfs_tempdir : bool, default True
|
|
279
|
-
sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
|
|
280
|
-
tmpfs_size : int, optional, default None
|
|
281
|
-
The value for the size (in MiB) of the tmpfs mount for this step.
|
|
282
|
-
This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
|
|
283
|
-
memory allocated for this step.
|
|
284
|
-
tmpfs_path : str, optional, default None
|
|
285
|
-
Path to tmpfs mount for this step. Defaults to /metaflow_temp.
|
|
286
|
-
inferentia : int, default 0
|
|
287
|
-
Number of Inferentia chips required for this step.
|
|
288
|
-
trainium : int, default None
|
|
289
|
-
Alias for inferentia. Use only one of the two.
|
|
290
|
-
efa : int, default 0
|
|
291
|
-
Number of elastic fabric adapter network devices to attach to container
|
|
292
|
-
ephemeral_storage : int, default None
|
|
293
|
-
The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
|
|
294
|
-
This is only relevant for Fargate compute environments
|
|
295
|
-
log_driver: str, optional, default None
|
|
296
|
-
The log driver to use for the Amazon ECS container.
|
|
297
|
-
log_options: List[str], optional, default None
|
|
298
|
-
List of strings containing options for the chosen log driver. The configurable values
|
|
299
|
-
depend on the `log driver` chosen. Validation of these options is not supported yet.
|
|
300
|
-
Example: [`awslogs-group:aws/batch/job`]
|
|
301
|
-
"""
|
|
302
|
-
...
|
|
303
|
-
|
|
304
155
|
@typing.overload
|
|
305
156
|
def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
306
157
|
"""
|
|
@@ -320,85 +171,6 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
|
|
|
320
171
|
"""
|
|
321
172
|
...
|
|
322
173
|
|
|
323
|
-
@typing.overload
|
|
324
|
-
def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
|
325
|
-
"""
|
|
326
|
-
Specifies the resources needed when executing this step.
|
|
327
|
-
|
|
328
|
-
Use `@resources` to specify the resource requirements
|
|
329
|
-
independently of the specific compute layer (`@batch`, `@kubernetes`).
|
|
330
|
-
|
|
331
|
-
You can choose the compute layer on the command line by executing e.g.
|
|
332
|
-
```
|
|
333
|
-
python myflow.py run --with batch
|
|
334
|
-
```
|
|
335
|
-
or
|
|
336
|
-
```
|
|
337
|
-
python myflow.py run --with kubernetes
|
|
338
|
-
```
|
|
339
|
-
which executes the flow on the desired system using the
|
|
340
|
-
requirements specified in `@resources`.
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
Parameters
|
|
344
|
-
----------
|
|
345
|
-
cpu : int, default 1
|
|
346
|
-
Number of CPUs required for this step.
|
|
347
|
-
gpu : int, optional, default None
|
|
348
|
-
Number of GPUs required for this step.
|
|
349
|
-
disk : int, optional, default None
|
|
350
|
-
Disk size (in MB) required for this step. Only applies on Kubernetes.
|
|
351
|
-
memory : int, default 4096
|
|
352
|
-
Memory size (in MB) required for this step.
|
|
353
|
-
shared_memory : int, optional, default None
|
|
354
|
-
The value for the size (in MiB) of the /dev/shm volume for this step.
|
|
355
|
-
This parameter maps to the `--shm-size` option in Docker.
|
|
356
|
-
"""
|
|
357
|
-
...
|
|
358
|
-
|
|
359
|
-
@typing.overload
|
|
360
|
-
def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
361
|
-
...
|
|
362
|
-
|
|
363
|
-
@typing.overload
|
|
364
|
-
def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
365
|
-
...
|
|
366
|
-
|
|
367
|
-
def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
|
|
368
|
-
"""
|
|
369
|
-
Specifies the resources needed when executing this step.
|
|
370
|
-
|
|
371
|
-
Use `@resources` to specify the resource requirements
|
|
372
|
-
independently of the specific compute layer (`@batch`, `@kubernetes`).
|
|
373
|
-
|
|
374
|
-
You can choose the compute layer on the command line by executing e.g.
|
|
375
|
-
```
|
|
376
|
-
python myflow.py run --with batch
|
|
377
|
-
```
|
|
378
|
-
or
|
|
379
|
-
```
|
|
380
|
-
python myflow.py run --with kubernetes
|
|
381
|
-
```
|
|
382
|
-
which executes the flow on the desired system using the
|
|
383
|
-
requirements specified in `@resources`.
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
Parameters
|
|
387
|
-
----------
|
|
388
|
-
cpu : int, default 1
|
|
389
|
-
Number of CPUs required for this step.
|
|
390
|
-
gpu : int, optional, default None
|
|
391
|
-
Number of GPUs required for this step.
|
|
392
|
-
disk : int, optional, default None
|
|
393
|
-
Disk size (in MB) required for this step. Only applies on Kubernetes.
|
|
394
|
-
memory : int, default 4096
|
|
395
|
-
Memory size (in MB) required for this step.
|
|
396
|
-
shared_memory : int, optional, default None
|
|
397
|
-
The value for the size (in MiB) of the /dev/shm volume for this step.
|
|
398
|
-
This parameter maps to the `--shm-size` option in Docker.
|
|
399
|
-
"""
|
|
400
|
-
...
|
|
401
|
-
|
|
402
174
|
@typing.overload
|
|
403
175
|
def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
|
404
176
|
"""
|
|
@@ -449,40 +221,58 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
|
|
|
449
221
|
...
|
|
450
222
|
|
|
451
223
|
@typing.overload
|
|
452
|
-
def
|
|
224
|
+
def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
|
453
225
|
"""
|
|
454
|
-
Specifies
|
|
455
|
-
|
|
456
|
-
This decorator is useful if this step may hang indefinitely.
|
|
457
|
-
|
|
458
|
-
This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
|
|
459
|
-
A timeout is considered to be an exception thrown by the step. It will cause the step to be
|
|
460
|
-
retried if needed and the exception will be caught by the `@catch` decorator, if present.
|
|
226
|
+
Specifies the PyPI packages for the step.
|
|
461
227
|
|
|
462
|
-
|
|
463
|
-
|
|
228
|
+
Information in this decorator will augment any
|
|
229
|
+
attributes set in the `@pyi_base` flow-level decorator. Hence,
|
|
230
|
+
you can use `@pypi_base` to set packages required by all
|
|
231
|
+
steps and use `@pypi` to specify step-specific overrides.
|
|
464
232
|
|
|
465
233
|
|
|
466
234
|
Parameters
|
|
467
235
|
----------
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
236
|
+
packages : Dict[str, str], default: {}
|
|
237
|
+
Packages to use for this step. The key is the name of the package
|
|
238
|
+
and the value is the version to use.
|
|
239
|
+
python : str, optional, default: None
|
|
240
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
|
241
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
|
474
242
|
"""
|
|
475
243
|
...
|
|
476
244
|
|
|
477
245
|
@typing.overload
|
|
478
|
-
def
|
|
246
|
+
def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
479
247
|
...
|
|
480
248
|
|
|
481
249
|
@typing.overload
|
|
482
|
-
def
|
|
250
|
+
def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
483
251
|
...
|
|
484
252
|
|
|
485
|
-
def
|
|
253
|
+
def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
|
|
254
|
+
"""
|
|
255
|
+
Specifies the PyPI packages for the step.
|
|
256
|
+
|
|
257
|
+
Information in this decorator will augment any
|
|
258
|
+
attributes set in the `@pyi_base` flow-level decorator. Hence,
|
|
259
|
+
you can use `@pypi_base` to set packages required by all
|
|
260
|
+
steps and use `@pypi` to specify step-specific overrides.
|
|
261
|
+
|
|
262
|
+
|
|
263
|
+
Parameters
|
|
264
|
+
----------
|
|
265
|
+
packages : Dict[str, str], default: {}
|
|
266
|
+
Packages to use for this step. The key is the name of the package
|
|
267
|
+
and the value is the version to use.
|
|
268
|
+
python : str, optional, default: None
|
|
269
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
|
270
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
|
271
|
+
"""
|
|
272
|
+
...
|
|
273
|
+
|
|
274
|
+
@typing.overload
|
|
275
|
+
def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
|
486
276
|
"""
|
|
487
277
|
Specifies a timeout for your step.
|
|
488
278
|
|
|
@@ -508,159 +298,147 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
|
|
|
508
298
|
...
|
|
509
299
|
|
|
510
300
|
@typing.overload
|
|
511
|
-
def
|
|
512
|
-
"""
|
|
513
|
-
Specifies that the step will success under all circumstances.
|
|
514
|
-
|
|
515
|
-
The decorator will create an optional artifact, specified by `var`, which
|
|
516
|
-
contains the exception raised. You can use it to detect the presence
|
|
517
|
-
of errors, indicating that all happy-path artifacts produced by the step
|
|
518
|
-
are missing.
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
Parameters
|
|
522
|
-
----------
|
|
523
|
-
var : str, optional, default None
|
|
524
|
-
Name of the artifact in which to store the caught exception.
|
|
525
|
-
If not specified, the exception is not stored.
|
|
526
|
-
print_exception : bool, default True
|
|
527
|
-
Determines whether or not the exception is printed to
|
|
528
|
-
stdout when caught.
|
|
529
|
-
"""
|
|
530
|
-
...
|
|
531
|
-
|
|
532
|
-
@typing.overload
|
|
533
|
-
def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
301
|
+
def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
534
302
|
...
|
|
535
303
|
|
|
536
304
|
@typing.overload
|
|
537
|
-
def
|
|
305
|
+
def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
538
306
|
...
|
|
539
307
|
|
|
540
|
-
def
|
|
308
|
+
def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
|
|
541
309
|
"""
|
|
542
|
-
Specifies
|
|
310
|
+
Specifies a timeout for your step.
|
|
543
311
|
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
312
|
+
This decorator is useful if this step may hang indefinitely.
|
|
313
|
+
|
|
314
|
+
This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
|
|
315
|
+
A timeout is considered to be an exception thrown by the step. It will cause the step to be
|
|
316
|
+
retried if needed and the exception will be caught by the `@catch` decorator, if present.
|
|
317
|
+
|
|
318
|
+
Note that all the values specified in parameters are added together so if you specify
|
|
319
|
+
60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
|
|
548
320
|
|
|
549
321
|
|
|
550
322
|
Parameters
|
|
551
323
|
----------
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
324
|
+
seconds : int, default 0
|
|
325
|
+
Number of seconds to wait prior to timing out.
|
|
326
|
+
minutes : int, default 0
|
|
327
|
+
Number of minutes to wait prior to timing out.
|
|
328
|
+
hours : int, default 0
|
|
329
|
+
Number of hours to wait prior to timing out.
|
|
558
330
|
"""
|
|
559
331
|
...
|
|
560
332
|
|
|
561
333
|
@typing.overload
|
|
562
|
-
def
|
|
334
|
+
def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
|
563
335
|
"""
|
|
564
|
-
Specifies
|
|
565
|
-
|
|
566
|
-
Information in this decorator will augment any
|
|
567
|
-
attributes set in the `@pyi_base` flow-level decorator. Hence,
|
|
568
|
-
you can use `@pypi_base` to set packages required by all
|
|
569
|
-
steps and use `@pypi` to specify step-specific overrides.
|
|
336
|
+
Specifies environment variables to be set prior to the execution of a step.
|
|
570
337
|
|
|
571
338
|
|
|
572
339
|
Parameters
|
|
573
340
|
----------
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
and the value is the version to use.
|
|
577
|
-
python : str, optional, default: None
|
|
578
|
-
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
|
579
|
-
that the version used will correspond to the version of the Python interpreter used to start the run.
|
|
341
|
+
vars : Dict[str, str], default {}
|
|
342
|
+
Dictionary of environment variables to set.
|
|
580
343
|
"""
|
|
581
344
|
...
|
|
582
345
|
|
|
583
346
|
@typing.overload
|
|
584
|
-
def
|
|
347
|
+
def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
585
348
|
...
|
|
586
349
|
|
|
587
350
|
@typing.overload
|
|
588
|
-
def
|
|
351
|
+
def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
589
352
|
...
|
|
590
353
|
|
|
591
|
-
def
|
|
354
|
+
def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
|
|
592
355
|
"""
|
|
593
|
-
Specifies
|
|
594
|
-
|
|
595
|
-
Information in this decorator will augment any
|
|
596
|
-
attributes set in the `@pyi_base` flow-level decorator. Hence,
|
|
597
|
-
you can use `@pypi_base` to set packages required by all
|
|
598
|
-
steps and use `@pypi` to specify step-specific overrides.
|
|
356
|
+
Specifies environment variables to be set prior to the execution of a step.
|
|
599
357
|
|
|
600
358
|
|
|
601
359
|
Parameters
|
|
602
360
|
----------
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
and the value is the version to use.
|
|
606
|
-
python : str, optional, default: None
|
|
607
|
-
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
|
608
|
-
that the version used will correspond to the version of the Python interpreter used to start the run.
|
|
361
|
+
vars : Dict[str, str], default {}
|
|
362
|
+
Dictionary of environment variables to set.
|
|
609
363
|
"""
|
|
610
364
|
...
|
|
611
365
|
|
|
612
366
|
@typing.overload
|
|
613
|
-
def
|
|
367
|
+
def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
|
614
368
|
"""
|
|
615
|
-
Specifies the
|
|
616
|
-
to a step needs to be retried.
|
|
369
|
+
Specifies the resources needed when executing this step.
|
|
617
370
|
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
it is advisable to annotate it with `@retry(times=0)`.
|
|
371
|
+
Use `@resources` to specify the resource requirements
|
|
372
|
+
independently of the specific compute layer (`@batch`, `@kubernetes`).
|
|
621
373
|
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
374
|
+
You can choose the compute layer on the command line by executing e.g.
|
|
375
|
+
```
|
|
376
|
+
python myflow.py run --with batch
|
|
377
|
+
```
|
|
378
|
+
or
|
|
379
|
+
```
|
|
380
|
+
python myflow.py run --with kubernetes
|
|
381
|
+
```
|
|
382
|
+
which executes the flow on the desired system using the
|
|
383
|
+
requirements specified in `@resources`.
|
|
625
384
|
|
|
626
385
|
|
|
627
386
|
Parameters
|
|
628
387
|
----------
|
|
629
|
-
|
|
630
|
-
Number of
|
|
631
|
-
|
|
632
|
-
Number of
|
|
388
|
+
cpu : int, default 1
|
|
389
|
+
Number of CPUs required for this step.
|
|
390
|
+
gpu : int, optional, default None
|
|
391
|
+
Number of GPUs required for this step.
|
|
392
|
+
disk : int, optional, default None
|
|
393
|
+
Disk size (in MB) required for this step. Only applies on Kubernetes.
|
|
394
|
+
memory : int, default 4096
|
|
395
|
+
Memory size (in MB) required for this step.
|
|
396
|
+
shared_memory : int, optional, default None
|
|
397
|
+
The value for the size (in MiB) of the /dev/shm volume for this step.
|
|
398
|
+
This parameter maps to the `--shm-size` option in Docker.
|
|
633
399
|
"""
|
|
634
400
|
...
|
|
635
401
|
|
|
636
402
|
@typing.overload
|
|
637
|
-
def
|
|
403
|
+
def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
638
404
|
...
|
|
639
405
|
|
|
640
406
|
@typing.overload
|
|
641
|
-
def
|
|
407
|
+
def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
642
408
|
...
|
|
643
409
|
|
|
644
|
-
def
|
|
410
|
+
def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
|
|
645
411
|
"""
|
|
646
|
-
Specifies the
|
|
647
|
-
to a step needs to be retried.
|
|
412
|
+
Specifies the resources needed when executing this step.
|
|
648
413
|
|
|
649
|
-
|
|
650
|
-
|
|
651
|
-
it is advisable to annotate it with `@retry(times=0)`.
|
|
414
|
+
Use `@resources` to specify the resource requirements
|
|
415
|
+
independently of the specific compute layer (`@batch`, `@kubernetes`).
|
|
652
416
|
|
|
653
|
-
|
|
654
|
-
|
|
655
|
-
|
|
417
|
+
You can choose the compute layer on the command line by executing e.g.
|
|
418
|
+
```
|
|
419
|
+
python myflow.py run --with batch
|
|
420
|
+
```
|
|
421
|
+
or
|
|
422
|
+
```
|
|
423
|
+
python myflow.py run --with kubernetes
|
|
424
|
+
```
|
|
425
|
+
which executes the flow on the desired system using the
|
|
426
|
+
requirements specified in `@resources`.
|
|
656
427
|
|
|
657
428
|
|
|
658
429
|
Parameters
|
|
659
430
|
----------
|
|
660
|
-
|
|
661
|
-
Number of
|
|
662
|
-
|
|
663
|
-
Number of
|
|
431
|
+
cpu : int, default 1
|
|
432
|
+
Number of CPUs required for this step.
|
|
433
|
+
gpu : int, optional, default None
|
|
434
|
+
Number of GPUs required for this step.
|
|
435
|
+
disk : int, optional, default None
|
|
436
|
+
Disk size (in MB) required for this step. Only applies on Kubernetes.
|
|
437
|
+
memory : int, default 4096
|
|
438
|
+
Memory size (in MB) required for this step.
|
|
439
|
+
shared_memory : int, optional, default None
|
|
440
|
+
The value for the size (in MiB) of the /dev/shm volume for this step.
|
|
441
|
+
This parameter maps to the `--shm-size` option in Docker.
|
|
664
442
|
"""
|
|
665
443
|
...
|
|
666
444
|
|
|
@@ -813,261 +591,296 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
|
|
|
813
591
|
...
|
|
814
592
|
|
|
815
593
|
@typing.overload
|
|
816
|
-
def
|
|
594
|
+
def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
|
817
595
|
"""
|
|
818
|
-
Specifies
|
|
596
|
+
Specifies the number of times the task corresponding
|
|
597
|
+
to a step needs to be retried.
|
|
598
|
+
|
|
599
|
+
This decorator is useful for handling transient errors, such as networking issues.
|
|
600
|
+
If your task contains operations that can't be retried safely, e.g. database updates,
|
|
601
|
+
it is advisable to annotate it with `@retry(times=0)`.
|
|
602
|
+
|
|
603
|
+
This can be used in conjunction with the `@catch` decorator. The `@catch`
|
|
604
|
+
decorator will execute a no-op task after all retries have been exhausted,
|
|
605
|
+
ensuring that the flow execution can continue.
|
|
819
606
|
|
|
820
607
|
|
|
821
608
|
Parameters
|
|
822
609
|
----------
|
|
823
|
-
|
|
824
|
-
|
|
610
|
+
times : int, default 3
|
|
611
|
+
Number of times to retry this task.
|
|
612
|
+
minutes_between_retries : int, default 2
|
|
613
|
+
Number of minutes between retries.
|
|
825
614
|
"""
|
|
826
615
|
...
|
|
827
616
|
|
|
828
617
|
@typing.overload
|
|
829
|
-
def
|
|
618
|
+
def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
830
619
|
...
|
|
831
620
|
|
|
832
621
|
@typing.overload
|
|
833
|
-
def
|
|
622
|
+
def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
834
623
|
...
|
|
835
624
|
|
|
836
|
-
def
|
|
625
|
+
def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
|
|
837
626
|
"""
|
|
838
|
-
Specifies
|
|
627
|
+
Specifies the number of times the task corresponding
|
|
628
|
+
to a step needs to be retried.
|
|
629
|
+
|
|
630
|
+
This decorator is useful for handling transient errors, such as networking issues.
|
|
631
|
+
If your task contains operations that can't be retried safely, e.g. database updates,
|
|
632
|
+
it is advisable to annotate it with `@retry(times=0)`.
|
|
633
|
+
|
|
634
|
+
This can be used in conjunction with the `@catch` decorator. The `@catch`
|
|
635
|
+
decorator will execute a no-op task after all retries have been exhausted,
|
|
636
|
+
ensuring that the flow execution can continue.
|
|
839
637
|
|
|
840
638
|
|
|
841
639
|
Parameters
|
|
842
640
|
----------
|
|
843
|
-
|
|
844
|
-
|
|
641
|
+
times : int, default 3
|
|
642
|
+
Number of times to retry this task.
|
|
643
|
+
minutes_between_retries : int, default 2
|
|
644
|
+
Number of minutes between retries.
|
|
845
645
|
"""
|
|
846
646
|
...
|
|
847
647
|
|
|
848
648
|
@typing.overload
|
|
849
|
-
def
|
|
649
|
+
def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
|
850
650
|
"""
|
|
851
|
-
Specifies
|
|
852
|
-
the execution of a step.
|
|
651
|
+
Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
|
|
853
652
|
|
|
854
653
|
|
|
855
654
|
Parameters
|
|
856
655
|
----------
|
|
857
|
-
|
|
858
|
-
|
|
859
|
-
|
|
860
|
-
|
|
656
|
+
cpu : int, default 1
|
|
657
|
+
Number of CPUs required for this step. If `@resources` is
|
|
658
|
+
also present, the maximum value from all decorators is used.
|
|
659
|
+
gpu : int, default 0
|
|
660
|
+
Number of GPUs required for this step. If `@resources` is
|
|
661
|
+
also present, the maximum value from all decorators is used.
|
|
662
|
+
memory : int, default 4096
|
|
663
|
+
Memory size (in MB) required for this step. If
|
|
664
|
+
`@resources` is also present, the maximum value from all decorators is
|
|
665
|
+
used.
|
|
666
|
+
image : str, optional, default None
|
|
667
|
+
Docker image to use when launching on AWS Batch. If not specified, and
|
|
668
|
+
METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
|
|
669
|
+
not, a default Docker image mapping to the current version of Python is used.
|
|
670
|
+
queue : str, default METAFLOW_BATCH_JOB_QUEUE
|
|
671
|
+
AWS Batch Job Queue to submit the job to.
|
|
672
|
+
iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
|
|
673
|
+
AWS IAM role that AWS Batch container uses to access AWS cloud resources.
|
|
674
|
+
execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
|
|
675
|
+
AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
|
|
676
|
+
(https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
|
|
677
|
+
shared_memory : int, optional, default None
|
|
678
|
+
The value for the size (in MiB) of the /dev/shm volume for this step.
|
|
679
|
+
This parameter maps to the `--shm-size` option in Docker.
|
|
680
|
+
max_swap : int, optional, default None
|
|
681
|
+
The total amount of swap memory (in MiB) a container can use for this
|
|
682
|
+
step. This parameter is translated to the `--memory-swap` option in
|
|
683
|
+
Docker where the value is the sum of the container memory plus the
|
|
684
|
+
`max_swap` value.
|
|
685
|
+
swappiness : int, optional, default None
|
|
686
|
+
This allows you to tune memory swappiness behavior for this step.
|
|
687
|
+
A swappiness value of 0 causes swapping not to happen unless absolutely
|
|
688
|
+
necessary. A swappiness value of 100 causes pages to be swapped very
|
|
689
|
+
aggressively. Accepted values are whole numbers between 0 and 100.
|
|
690
|
+
use_tmpfs : bool, default False
|
|
691
|
+
This enables an explicit tmpfs mount for this step. Note that tmpfs is
|
|
692
|
+
not available on Fargate compute environments
|
|
693
|
+
tmpfs_tempdir : bool, default True
|
|
694
|
+
sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
|
|
695
|
+
tmpfs_size : int, optional, default None
|
|
696
|
+
The value for the size (in MiB) of the tmpfs mount for this step.
|
|
697
|
+
This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
|
|
698
|
+
memory allocated for this step.
|
|
699
|
+
tmpfs_path : str, optional, default None
|
|
700
|
+
Path to tmpfs mount for this step. Defaults to /metaflow_temp.
|
|
701
|
+
inferentia : int, default 0
|
|
702
|
+
Number of Inferentia chips required for this step.
|
|
703
|
+
trainium : int, default None
|
|
704
|
+
Alias for inferentia. Use only one of the two.
|
|
705
|
+
efa : int, default 0
|
|
706
|
+
Number of elastic fabric adapter network devices to attach to container
|
|
707
|
+
ephemeral_storage : int, default None
|
|
708
|
+
The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
|
|
709
|
+
This is only relevant for Fargate compute environments
|
|
710
|
+
log_driver: str, optional, default None
|
|
711
|
+
The log driver to use for the Amazon ECS container.
|
|
712
|
+
log_options: List[str], optional, default None
|
|
713
|
+
List of strings containing options for the chosen log driver. The configurable values
|
|
714
|
+
depend on the `log driver` chosen. Validation of these options is not supported yet.
|
|
715
|
+
Example: [`awslogs-group:aws/batch/job`]
|
|
861
716
|
"""
|
|
862
717
|
...
|
|
863
718
|
|
|
864
719
|
@typing.overload
|
|
865
|
-
def
|
|
720
|
+
def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
866
721
|
...
|
|
867
722
|
|
|
868
723
|
@typing.overload
|
|
869
|
-
def
|
|
724
|
+
def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
870
725
|
...
|
|
871
726
|
|
|
872
|
-
def
|
|
727
|
+
def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
|
|
873
728
|
"""
|
|
874
|
-
Specifies
|
|
875
|
-
the execution of a step.
|
|
729
|
+
Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
|
|
876
730
|
|
|
877
731
|
|
|
878
732
|
Parameters
|
|
879
733
|
----------
|
|
880
|
-
|
|
881
|
-
|
|
882
|
-
|
|
883
|
-
|
|
734
|
+
cpu : int, default 1
|
|
735
|
+
Number of CPUs required for this step. If `@resources` is
|
|
736
|
+
also present, the maximum value from all decorators is used.
|
|
737
|
+
gpu : int, default 0
|
|
738
|
+
Number of GPUs required for this step. If `@resources` is
|
|
739
|
+
also present, the maximum value from all decorators is used.
|
|
740
|
+
memory : int, default 4096
|
|
741
|
+
Memory size (in MB) required for this step. If
|
|
742
|
+
`@resources` is also present, the maximum value from all decorators is
|
|
743
|
+
used.
|
|
744
|
+
image : str, optional, default None
|
|
745
|
+
Docker image to use when launching on AWS Batch. If not specified, and
|
|
746
|
+
METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
|
|
747
|
+
not, a default Docker image mapping to the current version of Python is used.
|
|
748
|
+
queue : str, default METAFLOW_BATCH_JOB_QUEUE
|
|
749
|
+
AWS Batch Job Queue to submit the job to.
|
|
750
|
+
iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
|
|
751
|
+
AWS IAM role that AWS Batch container uses to access AWS cloud resources.
|
|
752
|
+
execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
|
|
753
|
+
AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
|
|
754
|
+
(https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
|
|
755
|
+
shared_memory : int, optional, default None
|
|
756
|
+
The value for the size (in MiB) of the /dev/shm volume for this step.
|
|
757
|
+
This parameter maps to the `--shm-size` option in Docker.
|
|
758
|
+
max_swap : int, optional, default None
|
|
759
|
+
The total amount of swap memory (in MiB) a container can use for this
|
|
760
|
+
step. This parameter is translated to the `--memory-swap` option in
|
|
761
|
+
Docker where the value is the sum of the container memory plus the
|
|
762
|
+
`max_swap` value.
|
|
763
|
+
swappiness : int, optional, default None
|
|
764
|
+
This allows you to tune memory swappiness behavior for this step.
|
|
765
|
+
A swappiness value of 0 causes swapping not to happen unless absolutely
|
|
766
|
+
necessary. A swappiness value of 100 causes pages to be swapped very
|
|
767
|
+
aggressively. Accepted values are whole numbers between 0 and 100.
|
|
768
|
+
use_tmpfs : bool, default False
|
|
769
|
+
This enables an explicit tmpfs mount for this step. Note that tmpfs is
|
|
770
|
+
not available on Fargate compute environments
|
|
771
|
+
tmpfs_tempdir : bool, default True
|
|
772
|
+
sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
|
|
773
|
+
tmpfs_size : int, optional, default None
|
|
774
|
+
The value for the size (in MiB) of the tmpfs mount for this step.
|
|
775
|
+
This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
|
|
776
|
+
memory allocated for this step.
|
|
777
|
+
tmpfs_path : str, optional, default None
|
|
778
|
+
Path to tmpfs mount for this step. Defaults to /metaflow_temp.
|
|
779
|
+
inferentia : int, default 0
|
|
780
|
+
Number of Inferentia chips required for this step.
|
|
781
|
+
trainium : int, default None
|
|
782
|
+
Alias for inferentia. Use only one of the two.
|
|
783
|
+
efa : int, default 0
|
|
784
|
+
Number of elastic fabric adapter network devices to attach to container
|
|
785
|
+
ephemeral_storage : int, default None
|
|
786
|
+
The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
|
|
787
|
+
This is only relevant for Fargate compute environments
|
|
788
|
+
log_driver: str, optional, default None
|
|
789
|
+
The log driver to use for the Amazon ECS container.
|
|
790
|
+
log_options: List[str], optional, default None
|
|
791
|
+
List of strings containing options for the chosen log driver. The configurable values
|
|
792
|
+
depend on the `log driver` chosen. Validation of these options is not supported yet.
|
|
793
|
+
Example: [`awslogs-group:aws/batch/job`]
|
|
884
794
|
"""
|
|
885
795
|
...
|
|
886
796
|
|
|
 @typing.overload
-def
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
 
 
     Parameters
     ----------
-
-
-
-
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved.
+    role : str, optional, default: None
+        Role to use for fetching secrets.
     """
     ...
 
 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
-
+@typing.overload
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
     """
-    Specifies
-
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
 
 
     Parameters
     ----------
-
-
-
-
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved.
+    role : str, optional, default: None
+        Role to use for fetching secrets.
     """
     ...
 
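A minimal sketch of the parameterized `@secrets` form; the secret spec and environment variable names are hypothetical, and a secrets backend (e.g. AWS Secrets Manager) is assumed to be configured.

```python
import os
from metaflow import FlowSpec, secrets, step

class SecretsDemoFlow(FlowSpec):
    # "db-credentials" is a hypothetical secret spec; with the AWS Secrets
    # Manager provider it would name a secret whose keys are injected as
    # environment variables before the step body runs.
    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        print("db user:", os.environ.get("DB_USER"))  # hypothetical key
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsDemoFlow()
```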
 @typing.overload
-def
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully
-
-    Additionally, you can specify project aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
+    Specifies that the step will succeed under all circumstances.
 
-
-
-
-
-    - `prod.staging`
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
 
     Parameters
     ----------
-
-
-
-
-
-
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...
 
 @typing.overload
-def
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
-
-
-    Specifies the flow(s) that this flow depends on.
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully
-
-    Additionally, you can specify project aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
-
-
-    Parameters
-    ----------
-    flow : Union[str, Dict[str, str]], optional, default None
-        Upstream flow dependency for this flow.
-    flows : List[Union[str, Dict[str, str]]], default []
-        Upstream flow dependencies for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
     ...
 
-def
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
     """
-    Specifies
+    Specifies that the step will succeed under all circumstances.
 
-
-    use the
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-        The branch to use. If not specified, the branch is set to
-        `user.<username>` unless `production` is set to `True`. This can
-        also be set on the command line using `--branch` as a top-level option.
-        It is an error to specify `branch` in the decorator and on the command line.
-
-    production : bool, default False
-        Whether or not the branch is the production branch. This can also be set on the
-        command line using `--production` as a top-level option. It is an error to specify
-        `production` in the decorator and on the command line.
-        The project branch name will be:
-          - if `branch` is specified:
-            - if `production` is True: `prod.<branch>`
-            - if `production` is False: `test.<branch>`
-          - if `branch` is not specified:
-            - if `production` is True: `prod`
-            - if `production` is False: `user.<username>`
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...
 
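A minimal sketch of `@catch` in use; the flow, artifact name, and failing computation are illustrative.

```python
from metaflow import FlowSpec, catch, step

class CatchDemoFlow(FlowSpec):
    # If start() raises, @catch stores the exception in self.compute_failed
    # instead of failing the run; since Metaflow's step graph is static,
    # execution still proceeds to `end`.
    @catch(var="compute_failed", print_exception=True)
    @step
    def start(self):
        self.result = 1 / 0  # raises ZeroDivisionError, caught by @catch
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "compute_failed", None):
            print("start failed with:", self.compute_failed)

if __name__ == "__main__":
    CatchDemoFlow()
```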
@@ -1112,13 +925,10 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
     """
     ...
 
-def
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    The `@
-
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
+    The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
 
 
     Parameters
@@ -1140,18 +950,56 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
         Name of the sensor on Airflow
     description : str
         Description of sensor in the Airflow UI
-
-    The
-
-
-
-
-
-
-
-
-
+    external_dag_id : str
+        The dag_id that contains the task you want to wait for.
+    external_task_ids : List[str]
+        The list of task_ids that you want to wait for.
+        If None (default value), the sensor waits for the DAG. (Default: None)
+    allowed_states : List[str]
+        Iterable of allowed states. (Default: ['success'])
+    failed_states : List[str]
+        Iterable of failed or disallowed states. (Default: None)
+    execution_delta : datetime.timedelta
+        Time difference with the previous execution to look at;
+        the default is the same logical date as the current task or DAG. (Default: None)
+    check_existence : bool
+        Set to True to check if the external task exists or check if
+        the DAG to wait for exists. (Default: True)
+    """
+    ...
+
|
+
def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
|
972
|
+
"""
|
|
973
|
+
Specifies what flows belong to the same project.
|
|
974
|
+
|
|
975
|
+
A project-specific namespace is created for all flows that
|
|
976
|
+
use the same `@project(name)`.
|
|
977
|
+
|
|
978
|
+
|
|
979
|
+
Parameters
|
|
980
|
+
----------
|
|
981
|
+
name : str
|
|
982
|
+
Project name. Make sure that the name is unique amongst all
|
|
983
|
+
projects that use the same production scheduler. The name may
|
|
984
|
+
contain only lowercase alphanumeric characters and underscores.
|
|
985
|
+
|
|
986
|
+
branch : Optional[str], default None
|
|
987
|
+
The branch to use. If not specified, the branch is set to
|
|
988
|
+
`user.<username>` unless `production` is set to `True`. This can
|
|
989
|
+
also be set on the command line using `--branch` as a top-level option.
|
|
990
|
+
It is an error to specify `branch` in the decorator and on the command line.
|
|
991
|
+
|
|
992
|
+
production : bool, default False
|
|
993
|
+
Whether or not the branch is the production branch. This can also be set on the
|
|
994
|
+
command line using `--production` as a top-level option. It is an error to specify
|
|
995
|
+
`production` in the decorator and on the command line.
|
|
996
|
+
The project branch name will be:
|
|
997
|
+
- if `branch` is specified:
|
|
998
|
+
- if `production` is True: `prod.<branch>`
|
|
999
|
+
- if `production` is False: `test.<branch>`
|
|
1000
|
+
- if `branch` is not specified:
|
|
1001
|
+
- if `production` is True: `prod`
|
|
1002
|
+
- if `production` is False: `user.<username>`
|
|
1155
1003
|
"""
|
|
1156
1004
|
...
|
|
1157
1005
|
|
|
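A minimal sketch of `@project`; the project name is illustrative. Run without `--branch` or `--production`, this flow deploys under the `user.<username>` branch of the project namespace.

```python
from metaflow import FlowSpec, project, step

# All flows decorated with @project(name="demo_analytics") share one
# project-specific namespace on the production scheduler.
@project(name="demo_analytics")
class ProjectScopedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectScopedFlow()
```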
@@ -1248,6 +1096,100 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
     """
     ...
 
+@typing.overload
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+
+
+    Parameters
+    ----------
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    """
+    ...
+
+@typing.overload
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+
+
+    Parameters
+    ----------
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    """
+    ...
+
|
+
def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
|
1151
|
+
"""
|
|
1152
|
+
The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
|
|
1153
|
+
before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
|
|
1154
|
+
and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
|
|
1155
|
+
added as a flow decorators. Adding more than one decorator will ensure that `start` step
|
|
1156
|
+
starts only after all sensors finish.
|
|
1157
|
+
|
|
1158
|
+
|
|
1159
|
+
Parameters
|
|
1160
|
+
----------
|
|
1161
|
+
timeout : int
|
|
1162
|
+
Time, in seconds before the task times out and fails. (Default: 3600)
|
|
1163
|
+
poke_interval : int
|
|
1164
|
+
Time in seconds that the job should wait in between each try. (Default: 60)
|
|
1165
|
+
mode : str
|
|
1166
|
+
How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
|
|
1167
|
+
exponential_backoff : bool
|
|
1168
|
+
allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
|
|
1169
|
+
pool : str
|
|
1170
|
+
the slot pool this task should run in,
|
|
1171
|
+
slot pools are a way to limit concurrency for certain tasks. (Default:None)
|
|
1172
|
+
soft_fail : bool
|
|
1173
|
+
Set to true to mark the task as SKIPPED on failure. (Default: False)
|
|
1174
|
+
name : str
|
|
1175
|
+
Name of the sensor on Airflow
|
|
1176
|
+
description : str
|
|
1177
|
+
Description of sensor in the Airflow UI
|
|
1178
|
+
bucket_key : Union[str, List[str]]
|
|
1179
|
+
The key(s) being waited on. Supports full s3:// style url or relative path from root level.
|
|
1180
|
+
When it's specified as a full s3:// url, please leave `bucket_name` as None
|
|
1181
|
+
bucket_name : str
|
|
1182
|
+
Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
|
|
1183
|
+
When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
|
|
1184
|
+
wildcard_match : bool
|
|
1185
|
+
whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
|
|
1186
|
+
aws_conn_id : str
|
|
1187
|
+
a reference to the s3 connection on Airflow. (Default: None)
|
|
1188
|
+
verify : bool
|
|
1189
|
+
Whether or not to verify SSL certificates for S3 connection. (Default: None)
|
|
1190
|
+
"""
|
|
1191
|
+
...
|
|
1192
|
+
|
|
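A hedged sketch of this sensor on a flow; the bucket and key are hypothetical, and since `bucket_key` is a full s3:// URL, `bucket_name` is left at its None default, as the docstring above advises.

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Gate the `start` step on a hypothetical S3 object landing; requires the
# flow to be compiled for Airflow with `airflow create`.
@airflow_s3_key_sensor(
    name="wait_for_input",
    description="Block start until the daily input file lands",
    bucket_key="s3://example-bucket/input/daily.parquet",
)
class S3GatedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3GatedFlow()
```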
 @typing.overload
 def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
@@ -1299,46 +1241,104 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
     """
     ...
 
-
+@typing.overload
+def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
+    Specifies the flow(s) that this flow depends on.
+
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully.
+
+    Additionally, you can specify project-aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
+
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
+
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
+    """
+    ...
+
+@typing.overload
+def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+    """
+    Specifies the flow(s) that this flow depends on.
+
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully.
+
+    Additionally, you can specify project-aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
+
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
+
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
+
+
+    Parameters
+    ----------
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
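A minimal sketch of `@trigger_on_finish` using the `FooFlow` name from the docstring examples above; once deployed to a production orchestrator, this flow would start whenever a `FooFlow` run in the same project namespace finishes successfully.

```python
from metaflow import FlowSpec, step, trigger_on_finish

# Depend on a single upstream flow; the docstring above also shows the
# `flows=[...]` and fully qualified project_flow_name forms.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```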