ob-metaflow-stubs 6.0.3.176rc6__py2.py3-none-any.whl → 6.0.3.178__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +793 -789
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +27 -27
- metaflow-stubs/client/filecache.pyi +3 -3
- metaflow-stubs/events.pyi +6 -6
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +3 -3
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +4 -2
- metaflow-stubs/metaflow_current.pyi +104 -104
- metaflow-stubs/metaflow_git.pyi +2 -2
- metaflow-stubs/mf_extensions/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +5 -5
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +5 -5
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/{fast_bakery → aws}/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +52 -0
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +3 -3
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +5 -5
- metaflow-stubs/plugins/__init__.pyi +10 -10
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -3
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +5 -7
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +5 -5
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +15 -15
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +4 -4
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +8 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/perimeters.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
- metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
- metaflow-stubs/profilers/__init__.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +9 -6
- metaflow-stubs/runner/deployer_impl.pyi +6 -3
- metaflow-stubs/runner/metaflow_runner.pyi +7 -4
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +6 -3
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +2 -2
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_decorators.pyi +6 -6
- metaflow-stubs/user_configs/config_options.pyi +4 -4
- metaflow-stubs/user_configs/config_parameters.pyi +77 -17
- {ob_metaflow_stubs-6.0.3.176rc6.dist-info → ob_metaflow_stubs-6.0.3.178.dist-info}/METADATA +1 -1
- ob_metaflow_stubs-6.0.3.178.dist-info/RECORD +215 -0
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +0 -51
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +0 -65
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +0 -74
- metaflow-stubs/ob_internal.pyi +0 -11
- ob_metaflow_stubs-6.0.3.176rc6.dist-info/RECORD +0 -218
- {ob_metaflow_stubs-6.0.3.176rc6.dist-info → ob_metaflow_stubs-6.0.3.178.dist-info}/WHEEL +0 -0
- {ob_metaflow_stubs-6.0.3.176rc6.dist-info → ob_metaflow_stubs-6.0.3.178.dist-info}/top_level.txt +0 -0
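The headline changes in this release, per the file list above: a new `aws/assume_role_decorator.pyi` stub whose `assume_role` decorator is now re-exported from the top-level package (see the `__init__.pyi` diff below), removal of the `fast_bakery` and `ob_internal` stub modules, and new `image_pull_secrets` / `security_context` parameters on `@kubernetes`. A minimal sketch of how the new export might be used; only the name `assume_role` is confirmed by this diff, so the `role_arn` parameter below is a hypothetical placeholder:

```python
# Sketch only: this diff confirms that `assume_role` is re-exported from the
# top-level `metaflow` package; its actual parameters live in the (not shown)
# assume_role_decorator.pyi, so `role_arn` below is an assumed name.
from metaflow import FlowSpec, step, assume_role

class AssumeRoleFlow(FlowSpec):

    @assume_role(role_arn="arn:aws:iam::123456789012:role/Example")  # parameter name assumed
    @step
    def start(self):
        # AWS SDK calls made inside this step would run under the assumed role.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    AssumeRoleFlow()
```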
metaflow-stubs/__init__.pyi
CHANGED
```diff
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.15.
-# Generated on 2025-06-
+# MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
+# Generated on 2025-06-13T18:34:09.309751 #
 ######################################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import datetime
     import typing
+    import datetime
     FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
     StepFlag = typing.NewType("StepFlag", bool)

@@ -45,8 +45,8 @@ from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package imp
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
 from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
-from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
 from . import client as client
 from .client.core import namespace as namespace
 from .client.core import get_namespace as get_namespace
@@ -72,12 +72,12 @@ from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package imp
 from .mf_extensions.outerbounds.plugins.snowflake.snowflake import Snowflake as Snowflake
 from .mf_extensions.outerbounds.plugins.checkpoint_datastores.nebius import nebius_checkpoints as nebius_checkpoints
 from .mf_extensions.outerbounds.plugins.checkpoint_datastores.coreweave import coreweave_checkpoints as coreweave_checkpoints
+from .mf_extensions.outerbounds.plugins.aws.assume_role_decorator import assume_role as assume_role
 from . import cli_components as cli_components
 from . import system as system
 from . import pylint_wrapper as pylint_wrapper
 from . import cli as cli
 from . import profilers as profilers
-from . import ob_internal as ob_internal

 EXT_PKG: str

@@ -155,73 +155,78 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...

 @typing.overload
-def
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Specifies a timeout for your step.

+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


     Parameters
     ----------
-
-
-
-
-
-
-        - "fresh": Loads the lastest checkpoint created within the running Task.
-        This mode helps loading checkpoints across various retry attempts of the same task.
-        With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
-        created within the task will be loaded when the task is retries execution on failure.
-
-    temp_dir_root : str, default: None
-        The root directory under which `current.checkpoint.directory` will be created.
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
     """
     ...

 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
     """
-
+    Specifies a timeout for your step.
+
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.

+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


     Parameters
     ----------
-
-
-
-
-
-
-        - "fresh": Loads the lastest checkpoint created within the running Task.
-        This mode helps loading checkpoints across various retry attempts of the same task.
-        With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
-        created within the task will be loaded when the task is retries execution on failure.
-
-    temp_dir_root : str, default: None
-        The root directory under which `current.checkpoint.directory` will be created.
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
     """
     ...

-
+@typing.overload
+def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-
-
-
-
-
-
-
-
-
+    Internal decorator to support Fast bakery
+    """
+    ...
+
+@typing.overload
+def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Internal decorator to support Fast bakery
     """
     ...

@@ -305,149 +310,154 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
     ...

 @typing.overload
-def
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.


     Parameters
     ----------
-
-
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...

 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-    Specifies
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.


     Parameters
     ----------
-
-
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...

-
+@typing.overload
+def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Enables loading / saving of models within a step.
+


     Parameters
     ----------
-
-
-
-
-
-
-        used.
-    disk : int, default 10240
-        Disk size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on Kubernetes. If not specified, and
-        METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
-        If given, the imagePullPolicy to be applied to the Docker image of the step.
-    service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
-        Kubernetes service account to use when launching pod in Kubernetes.
-    secrets : List[str], optional, default None
-        Kubernetes secrets to use when launching pod in Kubernetes. These
-        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
-        in Metaflow configuration.
-    node_selector: Union[Dict[str,str], str], optional, default None
-        Kubernetes node selector(s) to apply to the pod running the task.
-        Can be passed in as a comma separated string of values e.g.
-        'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
-        {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
-    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
-        Kubernetes namespace to use when launching pod in Kubernetes.
-    gpu : int, optional, default None
-        Number of GPUs required for this step. A value of zero implies that
-        the scheduled node should not have GPUs.
-    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
-        The vendor of the GPUs to be used for this step.
-    tolerations : List[str], default []
-        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
-        Kubernetes tolerations to use when launching pod in Kubernetes.
-    labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
-        Kubernetes labels to use when launching pod in Kubernetes.
-    annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
-        Kubernetes annotations to use when launching pod in Kubernetes.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step.
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default: None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default /metaflow_temp
-        Path to tmpfs mount for this step.
-    persistent_volume_claims : Dict[str, str], optional, default None
-        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
-        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
-    shared_memory: int, optional
-        Shared memory size (in MiB) required for this step
-    port: int, optional
-        Port number to specify in the Kubernetes job object
-    compute_pool : str, optional, default None
-        Compute pool to be used for for this step.
-        If not specified, any accessible compute pool within the perimeter is used.
-    hostname_resolution_timeout: int, default 10 * 60
-        Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
-        Only applicable when @parallel is used.
-    qos: str, default: Burstable
-        Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+    load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+        Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+        These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+        - `current.checkpoint`
+        - `current.model`
+        - `current.huggingface_hub`

-
-
-
-
-
-
-        - run_as_non_root: bool, optional, default None
+        If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+        the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+        If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+    temp_dir_root : str, default: None
+        The root directory under which `current.model.loaded` will store loaded models
     """
     ...

-
+@typing.overload
+def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
     """
-
+    Enables loading / saving of models within a step.

-    User code call
-    --------------
-    @ollama(
-        models=[...],
-        ...
-    )

-    Valid backend options
-    ---------------------
-    - 'local': Run as a separate process on the local task machine.
-    - (TODO) 'managed': Outerbounds hosts and selects compute provider.
-    - (TODO) 'remote': Spin up separate instance to serve Ollama models.

-
-
-
+    Parameters
+    ----------
+    load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+        Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+        These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+        - `current.checkpoint`
+        - `current.model`
+        - `current.huggingface_hub`
+
+        If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+        the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+        If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+    temp_dir_root : str, default: None
+        The root directory under which `current.model.loaded` will store loaded models
+    """
+    ...
+
+def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step is used to deploy an instance of the app.
+    Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.


     Parameters
     ----------
-
-
+    app_port : int
+        Number of GPUs to use.
+    app_name : str
+        Name of the app to deploy.
+    """
+    ...
+
+def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on DGX cloud.
+
+
+    Parameters
+    ----------
+    gpu : int
+        Number of GPUs to use.
+    gpu_type : str
+        Type of Nvidia GPU to use.
+    queue_timeout : int
+        Time to keep the job in NVCF's queue.
+    """
+    ...
+
+def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    This decorator is used to run Ollama APIs as Metaflow task sidecars.
+
+    User code call
+    --------------
+    @ollama(
+        models=[...],
+        ...
+    )
+
+    Valid backend options
+    ---------------------
+    - 'local': Run as a separate process on the local task machine.
+    - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+    - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+    Valid model options
+    -------------------
+    Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
+
+
+    Parameters
+    ----------
+    models: list[str]
+        List of Ollama containers running models in sidecars.
     backend: str
         Determines where and how to run the Ollama process.
     force_pull: bool
@@ -514,65 +524,6 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     """
     ...

-@typing.overload
-def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
-    Parameters
-    ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
-    """
-    ...
-
-@typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
-    """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
-    Parameters
-    ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
-    """
-    ...
-
 def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
     Decorator that helps cache, version and store models/datasets from huggingface hub.
@@ -599,126 +550,231 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
     ...

 @typing.overload
-def
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Specifies environment variables to be set prior to the execution of a step.


+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+    """
+    Specifies environment variables to be set prior to the execution of a step.
+

     Parameters
     ----------
-
-
-
-
-
-
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
+@typing.overload
+def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Enables checkpointing for a step.

-
-
-
+
+
+    Parameters
+    ----------
+    load_policy : str, default: "fresh"
+        The policy for loading the checkpoint. The following policies are supported:
+        - "eager": Loads the the latest available checkpoint within the namespace.
+        With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+        will be loaded at the start of the task.
+        - "none": Do not load any checkpoint
+        - "fresh": Loads the lastest checkpoint created within the running Task.
+        This mode helps loading checkpoints across various retry attempts of the same task.
+        With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+        created within the task will be loaded when the task is retries execution on failure.

     temp_dir_root : str, default: None
-        The root directory under which `current.
+        The root directory under which `current.checkpoint.directory` will be created.
     """
     ...

 @typing.overload
-def
+def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
     """
-    Enables
+    Enables checkpointing for a step.



     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
+    load_policy : str, default: "fresh"
+        The policy for loading the checkpoint. The following policies are supported:
+        - "eager": Loads the the latest available checkpoint within the namespace.
+        With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+        will be loaded at the start of the task.
+        - "none": Do not load any checkpoint
+        - "fresh": Loads the lastest checkpoint created within the running Task.
+        This mode helps loading checkpoints across various retry attempts of the same task.
+        With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+        created within the task will be loaded when the task is retries execution on failure.

     temp_dir_root : str, default: None
-        The root directory under which `current.
+        The root directory under which `current.checkpoint.directory` will be created.
     """
     ...

-def
+def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that this step
-    Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
+    Specifies that this step should execute on DGX cloud.


     Parameters
     ----------
-
+    gpu : int
         Number of GPUs to use.
-
-
+    gpu_type : str
+        Type of Nvidia GPU to use.
     """
     ...

 @typing.overload
-def
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.


     Parameters
     ----------
-
-
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...

 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Specifies
-
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.


     Parameters
     ----------
-
-
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...

 @typing.overload
-def
-    """
-    Internal decorator to support Fast bakery
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+
+
+    Parameters
+    ----------
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
+    """
+    ...
+
 @typing.overload
-def
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

-
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
     """
-
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+
+
+    Parameters
+    ----------
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...

@@ -773,6 +829,95 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     """
     ...

+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on Kubernetes.
+
+
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    disk : int, default 10240
+        Disk size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on Kubernetes. If not specified, and
+        METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+        If given, the imagePullPolicy to be applied to the Docker image of the step.
+    image_pull_secrets: List[str], default []
+        The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+        Kubernetes image pull secrets to use when pulling container images
+        in Kubernetes.
+    service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+        Kubernetes service account to use when launching pod in Kubernetes.
+    secrets : List[str], optional, default None
+        Kubernetes secrets to use when launching pod in Kubernetes. These
+        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+        in Metaflow configuration.
+    node_selector: Union[Dict[str,str], str], optional, default None
+        Kubernetes node selector(s) to apply to the pod running the task.
+        Can be passed in as a comma separated string of values e.g.
+        'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+        {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+        Kubernetes namespace to use when launching pod in Kubernetes.
+    gpu : int, optional, default None
+        Number of GPUs required for this step. A value of zero implies that
+        the scheduled node should not have GPUs.
+    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+        The vendor of the GPUs to be used for this step.
+    tolerations : List[str], default []
+        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+        Kubernetes tolerations to use when launching pod in Kubernetes.
+    labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+        Kubernetes labels to use when launching pod in Kubernetes.
+    annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+        Kubernetes annotations to use when launching pod in Kubernetes.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step.
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default: None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default /metaflow_temp
+        Path to tmpfs mount for this step.
+    persistent_volume_claims : Dict[str, str], optional, default None
+        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+    shared_memory: int, optional
+        Shared memory size (in MiB) required for this step
+    port: int, optional
+        Port number to specify in the Kubernetes job object
+    compute_pool : str, optional, default None
+        Compute pool to be used for for this step.
+        If not specified, any accessible compute pool within the perimeter is used.
+    hostname_resolution_timeout: int, default 10 * 60
+        Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+        Only applicable when @parallel is used.
+    qos: str, default: Burstable
+        Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+    security_context: Dict[str, Any], optional, default None
+        Container security context. Applies to the task container. Allows the following keys:
+        - privileged: bool, optional, default None
+        - allow_privilege_escalation: bool, optional, default None
+        - run_as_user: int, optional, default None
+        - run_as_group: int, optional, default None
+        - run_as_non_root: bool, optional, default None
+    """
+    ...
+
 @typing.overload
 def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
```
|
778
923
|
"""
|
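As orientation for the newly added `@kubernetes` stub above, here is a minimal sketch of how the decorator is typically applied to a step. The flow name, step bodies, and resource values are illustrative only and are not part of this diff:

```python
# Hypothetical flow using the @kubernetes decorator documented in the stub above.
from metaflow import FlowSpec, kubernetes, step


class HelloKubernetesFlow(FlowSpec):

    @kubernetes(cpu=2, memory=8192, disk=20480)  # 2 CPUs, 8 GB RAM, 20 GB disk
    @step
    def start(self):
        # This step runs inside a Kubernetes pod rather than locally.
        print("Hello from a Kubernetes pod!")
        self.next(self.end)

    @step
    def end(self):
        print("Done.")


if __name__ == "__main__":
    HelloKubernetesFlow()
```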
@@ -832,348 +977,238 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     """
     ...
 
-def
+def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
     """
-
-
+    Allows setting external datastores to save data for the
+    `@checkpoint`/`@model`/`@huggingface_hub` decorators.
 
-
-
-    gpu : int
-        Number of GPUs to use.
-    gpu_type : str
-        Type of Nvidia GPU to use.
-    queue_timeout : int
-        Time to keep the job in NVCF's queue.
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorator types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorator types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
+    This decorator is useful when users wish to save data to a different datastore
+    than what is configured in Metaflow. This can be for a variety of reasons:
 
-
-
-
+    1. Data security: The objects need to be stored in a bucket (object storage) that is not accessible by other flows.
+    2. Data locality: The location where the task is executing is not in the same region as the datastore.
+       - Example: The Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
+    3. Data lifecycle policies: The objects need to be archived / managed separately from the Metaflow-managed objects.
+       - Example: The flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow-managed objects.
 
-
-
-    ensuring that the flow execution can continue.
+    Usage:
+    ----------
 
+    - Using a custom IAM role to access the datastore.
 
-
-
-
-
-
-
-
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
-    """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
+    ```python
+    @with_artifact_store(
+        type="s3",
+        config=lambda: {
+            "root": "s3://my-bucket-foo/path/to/root",
+            "role_arn": ROLE,
+        },
+    )
+    class MyFlow(FlowSpec):
 
-
-
-
+        @checkpoint
+        @step
+        def start(self):
+            with open("my_file.txt", "w") as f:
+                f.write("Hello, World!")
+            self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
+            self.next(self.end)
 
-
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    ```
 
+    - Using credentials to access the s3-compatible datastore.
 
-
-
-
-
-
-
+    ```python
+    @with_artifact_store(
+        type="s3",
+        config=lambda: {
+            "root": "s3://my-bucket-foo/path/to/root",
+            "client_params": {
+                "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
+                "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
+            },
+        },
+    )
+    class MyFlow(FlowSpec):
+
+        @checkpoint
+        @step
+        def start(self):
+            with open("my_file.txt", "w") as f:
+                f.write("Hello, World!")
+            self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
+            self.next(self.end)
+
+    ```
+
+    - Accessing objects stored in external datastores after task execution.
+
+    ```python
+    run = Run("CheckpointsTestsFlow/8992")
+    with artifact_store_from(run=run, config={
+        "client_params": {
+            "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
+            "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
+        },
+    }):
+        with Checkpoint() as cp:
+            latest = cp.list(
+                task=run["start"].task
+            )[0]
+            print(latest)
+            cp.load(
+                latest,
+                "test-checkpoints"
+            )
+
+    task = Task("TorchTuneFlow/8484/train/53673")
+    with artifact_store_from(run=run, config={
+        "client_params": {
+            "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
+            "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
+        },
+    }):
+        load_model(
+            task.data.model_ref,
+            "test-models"
+        )
+    ```
+    Parameters:
+    ----------
+
+    type: str
+        The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported Metaflow datastore.
+
+    config: dict or Callable
+        Dictionary of configuration options for the datastore. The following keys are required:
+        - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
+            - example: 's3://bucket-name/path/to/root'
+            - example: 'gs://bucket-name/path/to/root'
+            - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
+        - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
+        - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
+        - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
+    """
     ...
 
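The docstring above shows S3 configurations; as a sketch, the same pattern with a GCS root (the bucket path is hypothetical) would only change `type` and `root`, since 'gcs' is listed as a supported datastore type:

```python
# Hypothetical GCS variant of the @with_artifact_store examples above.
from metaflow import FlowSpec, checkpoint, current, step, with_artifact_store


@with_artifact_store(
    type="gcs",  # 'gcs' is among the supported types per the docstring above
    config=lambda: {
        "root": "gs://my-bucket-foo/path/to/root",  # hypothetical bucket path
    },
)
class MyGCSFlow(FlowSpec):

    @checkpoint
    @step
    def start(self):
        with open("my_file.txt", "w") as f:
            f.write("Hello, World!")
        # The checkpoint lands under the GCS root instead of the default datastore.
        self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
        self.next(self.end)

    @step
    def end(self):
        pass
```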
 @typing.overload
-def
+def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Specifies the PyPI packages for all steps of the flow.
 
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
 
     Parameters
     ----------
-
-
-
-
-
-
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...
 
 @typing.overload
-def
+def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-
-
+def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+    """
+    Specifies the PyPI packages for all steps of the flow.
+
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
     ...
 
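A minimal sketch of the `@pypi_base`/`@pypi` split described in the docstrings above; the package versions and flow name are illustrative:

```python
# Hypothetical flow: @pypi_base pins packages for every step, while @pypi
# adds a step-specific override, mirroring the stub docstrings above.
from metaflow import FlowSpec, pypi, pypi_base, step


@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.5")
class PyPIBaseFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # available in every step via @pypi_base
        print(pd.__version__)
        self.next(self.train)

    @pypi(packages={"scikit-learn": "1.5.0"})  # step-specific addition
    @step
    def train(self):
        import sklearn
        print(sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PyPIBaseFlow()
```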
-def
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-
-
-
-    are missing.
+    The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as flow decorators. Adding more than one decorator will ensure that the `start` step
+    starts only after all sensors finish.
 
 
     Parameters
     ----------
-
-
-
-
-
-
+    timeout : int
+        Time, in seconds, before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+    pool : str
+        The slot pool this task should run in;
+        slot pools are a way to limit concurrency for certain tasks. (Default: None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow.
+    description : str
+        Description of the sensor in the Airflow UI.
+    bucket_key : Union[str, List[str]]
+        The key(s) being waited on. Supports a full s3:// style url or a relative path from the root level.
+        When it is specified as a full s3:// url, please leave `bucket_name` as None.
+    bucket_name : str
+        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+        When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+    wildcard_match : bool
+        Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+    aws_conn_id : str
+        A reference to the s3 connection on Airflow. (Default: None)
+    verify : bool
+        Whether or not to verify SSL certificates for the S3 connection. (Default: None)
     """
     ...
 
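A sketch of the flow-level sensor described above, assuming the decorator is exported at the package top level (as the stubs suggest) and that unset arguments fall back to the documented defaults; the bucket and key are hypothetical:

```python
# Hypothetical flow: @airflow_s3_key_sensor gates the start step on an S3 key
# when the flow is compiled with `python flow.py airflow create`.
from metaflow import FlowSpec, airflow_s3_key_sensor, step


@airflow_s3_key_sensor(
    bucket_key="s3://my-data-bucket/daily/input.csv",  # full s3:// url, so bucket_name stays unset
    timeout=3600,
    poke_interval=60,
)
class WaitForDataFlow(FlowSpec):

    @step
    def start(self):
        # Runs only after the sensor has seen the key.
        self.next(self.end)

    @step
    def end(self):
        pass
```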
 @typing.overload
-def
+def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully.
-
-    Additionally, you can specify project-aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
+    Specifies the Conda environment for all steps of the flow.
 
-
-
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.
 
 
     Parameters
     ----------
-
-
-
-
-
-
+    packages : Dict[str, str], default {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables Conda.
     """
     ...
 
 @typing.overload
-def
+def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-def
+def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies the
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully.
-
-    Additionally, you can specify project-aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
+    Specifies the Conda environment for all steps of the flow.
 
-
-
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
-
-
-    Parameters
-    ----------
-    flow : Union[str, Dict[str, str]], optional, default None
-        Upstream flow dependency for this flow.
-    flows : List[Union[str, Dict[str, str]]], default []
-        Upstream flow dependencies for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
-    ...
-
-@typing.overload
-def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the event(s) that this flow depends on.
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
-
-
-    Parameters
-    ----------
-    event : Union[str, Dict[str, Any]], optional, default None
-        Event dependency for this flow.
-    events : List[Union[str, Dict[str, Any]]], default []
-        Events dependency for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
-    ...
-
-@typing.overload
-def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
-    """
-    Specifies the event(s) that this flow depends on.
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
-
-
-    Parameters
-    ----------
-    event : Union[str, Dict[str, Any]], optional, default None
-        Event dependency for this flow.
-    events : List[Union[str, Dict[str, Any]]], default []
-        Events dependency for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
-    ...
-
-@typing.overload
-def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the Conda environment for all steps of the flow.
-
-    Use `@conda_base` to set common libraries required by all
-    steps and use `@conda` to specify step-specific additions.
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.
 
 
     Parameters
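For symmetry with the `@pypi_base` sketch earlier, a minimal `@conda_base`/`@conda` example matching the docstrings above; the versions and flow name are illustrative:

```python
# Hypothetical flow: @conda_base resolves a flow-wide environment and @conda
# layers a step-specific addition on top, as the stub docstrings describe.
from metaflow import FlowSpec, conda, conda_base, step


@conda_base(packages={"numpy": "1.26.4"}, python="3.11.5")
class CondaBaseFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # provided by @conda_base in every step
        print(np.__version__)
        self.next(self.stats)

    @conda(packages={"scipy": "1.13.0"})  # added on top of the base environment
    @step
    def stats(self):
        import scipy
        print(scipy.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaBaseFlow()
```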
@@ -1191,30 +1226,46 @@ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[s
     """
     ...
 
-
-def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-    Use `@conda_base` to set common libraries required by all
-    steps and use `@conda` to specify step-specific additions.
+    The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
+    timeout : int
+        Time, in seconds, before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+    pool : str
+        The slot pool this task should run in;
+        slot pools are a way to limit concurrency for certain tasks. (Default: None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow.
+    description : str
+        Description of the sensor in the Airflow UI.
+    external_dag_id : str
+        The dag_id that contains the task you want to wait for.
+    external_task_ids : List[str]
+        The list of task_ids that you want to wait for.
+        If None (default value), the sensor waits for the DAG. (Default: None)
+    allowed_states : List[str]
+        Iterable of allowed states. (Default: ['success'])
+    failed_states : List[str]
+        Iterable of failed or disallowed states. (Default: None)
+    execution_delta : datetime.timedelta
+        Time difference with the previous execution to look at;
+        the default is the same logical date as the current task or DAG. (Default: None)
+    check_existence : bool
+        Set to True to check if the external task exists or check if
+        the DAG to wait for exists. (Default: True)
     """
     ...
 
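A sketch of the external-task sensor above, under the same assumptions as the S3 sensor example (top-level export, documented runtime defaults); the DAG and task ids are hypothetical:

```python
# Hypothetical flow: when compiled with `airflow create`, the start step waits
# for a task in another Airflow DAG to reach the allowed states.
from metaflow import FlowSpec, airflow_external_task_sensor, step


@airflow_external_task_sensor(
    external_dag_id="nightly_etl",          # hypothetical upstream DAG
    external_task_ids=["publish_dataset"],  # hypothetical task within that DAG
    timeout=3600,
    poke_interval=60,
)
class DownstreamOfETLFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```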
@@ -1269,90 +1320,6 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
     """
     ...
 
-@typing.overload
-def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the PyPI packages for all steps of the flow.
-
-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this flow. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
-@typing.overload
-def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
-    """
-    Specifies the PyPI packages for all steps of the flow.
-
-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this flow. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as flow decorators. Adding more than one decorator will ensure that the `start` step
-    starts only after all sensors finish.
-
-
-    Parameters
-    ----------
-    timeout : int
-        Time, in seconds, before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
-    pool : str
-        The slot pool this task should run in;
-        slot pools are a way to limit concurrency for certain tasks. (Default: None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow.
-    description : str
-        Description of the sensor in the Airflow UI.
-    bucket_key : Union[str, List[str]]
-        The key(s) being waited on. Supports a full s3:// style url or a relative path from the root level.
-        When it is specified as a full s3:// url, please leave `bucket_name` as None.
-    bucket_name : str
-        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
-        When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
-    wildcard_match : bool
-        Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
-    aws_conn_id : str
-        A reference to the s3 connection on Airflow. (Default: None)
-    verify : bool
-        Whether or not to verify SSL certificates for the S3 connection. (Default: None)
-    """
-    ...
-
 def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
     Specifies what flows belong to the same project.
@@ -1388,160 +1355,197 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
     """
     ...
 
-
+@typing.overload
+def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-
+    Specifies the flow(s) that this flow depends on.
+
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully.
+
+    Additionally, you can specify project-aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
+
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
+
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
+
 
     Parameters
     ----------
-
-
-
-
-
-
-    exponential_backoff : bool
-        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
-    pool : str
-        The slot pool this task should run in;
-        slot pools are a way to limit concurrency for certain tasks. (Default: None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow.
-    description : str
-        Description of the sensor in the Airflow UI.
-    external_dag_id : str
-        The dag_id that contains the task you want to wait for.
-    external_task_ids : List[str]
-        The list of task_ids that you want to wait for.
-        If None (default value), the sensor waits for the DAG. (Default: None)
-    allowed_states : List[str]
-        Iterable of allowed states. (Default: ['success'])
-    failed_states : List[str]
-        Iterable of failed or disallowed states. (Default: None)
-    execution_delta : datetime.timedelta
-        Time difference with the previous execution to look at;
-        the default is the same logical date as the current task or DAG. (Default: None)
-    check_existence : bool
-        Set to True to check if the external task exists or check if
-        the DAG to wait for exists. (Default: True)
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
-
+@typing.overload
+def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-
-    `@checkpoint`/`@model`/`@huggingface_hub` decorators.
+    Specifies the flow(s) that this flow depends on.
 
-
-
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully.
 
-
-
-
-
-
+    Additionally, you can specify project-aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
 
-
-
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
 
-
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
 
-    ```python
-    @with_artifact_store(
-        type="s3",
-        config=lambda: {
-            "root": "s3://my-bucket-foo/path/to/root",
-            "role_arn": ROLE,
-        },
-    )
-    class MyFlow(FlowSpec):
 
-
-
-
-
-
-
-
+    Parameters
+    ----------
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
+    """
+    ...
+
+@typing.overload
+def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the event(s) that this flow depends on.
 
-
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
 
-
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    ```
 
-
-
-
-
-
-
-
-
-            },
-        },
-    )
-    class MyFlow(FlowSpec):
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```
 
-        @checkpoint
-        @step
-        def start(self):
-            with open("my_file.txt", "w") as f:
-                f.write("Hello, World!")
-            self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
-            self.next(self.end)
 
-
+    Parameters
+    ----------
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
+    """
+    ...
+
+@typing.overload
+def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+    """
+    Specifies the event(s) that this flow depends on.
 
-
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
 
-
-
-
-
-
-
-
-
-
-
-                task=run["start"].task
-            )[0]
-            print(latest)
-            cp.load(
-                latest,
-                "test-checkpoints"
-            )
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    ```
 
-
-
-
-
-
-
-
-
-            task.data.model_ref,
-            "test-models"
-        )
-    ```
-    Parameters:
-    ----------
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```
 
-    type: str
-        The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported Metaflow datastore.
 
-
-
-
-
-
-
-
-        -
-        - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
+    Parameters
+    ----------
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
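Finally, a sketch tying together the eventing decorators reshuffled in this hunk. `@trigger_on_finish` and `@trigger` take effect when the flows are deployed to a production orchestrator; the flow and event names here are hypothetical:

```python
# Hypothetical pair of flows using the eventing decorators documented above:
# SecondFlow runs after FirstFlow completes; EventFlow runs on a named event.
from metaflow import FlowSpec, step, trigger, trigger_on_finish


@trigger_on_finish(flow="FirstFlow")  # depends on a successful run of FirstFlow
class SecondFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


@trigger(event="data_updated")  # fires when the 'data_updated' event is published
class EventFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```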