ob-metaflow-stubs 6.0.3.178__py2.py3-none-any.whl → 6.0.3.179rc0__py2.py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- metaflow-stubs/__init__.pyi +653 -652
- metaflow-stubs/cards.pyi +1 -1
- metaflow-stubs/cli.pyi +1 -1
- metaflow-stubs/cli_components/__init__.pyi +1 -1
- metaflow-stubs/cli_components/utils.pyi +1 -1
- metaflow-stubs/client/__init__.pyi +1 -1
- metaflow-stubs/client/core.pyi +5 -5
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/events.pyi +1 -1
- metaflow-stubs/exception.pyi +1 -1
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +3 -3
- metaflow-stubs/info_file.pyi +1 -1
- metaflow-stubs/metadata_provider/__init__.pyi +1 -1
- metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
- metaflow-stubs/metadata_provider/metadata.pyi +2 -2
- metaflow-stubs/metadata_provider/util.pyi +1 -1
- metaflow-stubs/metaflow_config.pyi +1 -1
- metaflow-stubs/metaflow_current.pyi +49 -49
- metaflow-stubs/metaflow_git.pyi +1 -1
- metaflow-stubs/mf_extensions/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +6 -0
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +51 -0
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +65 -0
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +74 -0
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
- metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -1
- metaflow-stubs/multicore_utils.pyi +1 -1
- metaflow-stubs/ob_internal.pyi +11 -0
- metaflow-stubs/parameters.pyi +3 -3
- metaflow-stubs/plugins/__init__.pyi +12 -12
- metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
- metaflow-stubs/plugins/airflow/exception.pyi +1 -1
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
- metaflow-stubs/plugins/argo/__init__.pyi +1 -1
- metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
- metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
- metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +1 -1
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +1 -1
- metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
- metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
- metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
- metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/azure/__init__.pyi +1 -1
- metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
- metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
- metaflow-stubs/plugins/cards/__init__.pyi +1 -1
- metaflow-stubs/plugins/cards/card_client.pyi +1 -1
- metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
- metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
- metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
- metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
- metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
- metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
- metaflow-stubs/plugins/cards/exception.pyi +1 -1
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
- metaflow-stubs/plugins/datatools/local.pyi +1 -1
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
- metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
- metaflow-stubs/plugins/debug_logger.pyi +1 -1
- metaflow-stubs/plugins/debug_monitor.pyi +1 -1
- metaflow-stubs/plugins/environment_decorator.pyi +1 -1
- metaflow-stubs/plugins/events_decorator.pyi +1 -1
- metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
- metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
- metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
- metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
- metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
- metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
- metaflow-stubs/plugins/ollama/__init__.pyi +1 -1
- metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
- metaflow-stubs/plugins/perimeters.pyi +1 -1
- metaflow-stubs/plugins/project_decorator.pyi +1 -1
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
- metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
- metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
- metaflow-stubs/plugins/pypi/utils.pyi +1 -1
- metaflow-stubs/plugins/resources_decorator.pyi +1 -1
- metaflow-stubs/plugins/retry_decorator.pyi +1 -1
- metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
- metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
- metaflow-stubs/plugins/storage_executor.pyi +1 -1
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
- metaflow-stubs/plugins/uv/__init__.pyi +1 -1
- metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
- metaflow-stubs/profilers/__init__.pyi +1 -1
- metaflow-stubs/pylint_wrapper.pyi +1 -1
- metaflow-stubs/runner/__init__.pyi +1 -1
- metaflow-stubs/runner/deployer.pyi +29 -29
- metaflow-stubs/runner/deployer_impl.pyi +2 -2
- metaflow-stubs/runner/metaflow_runner.pyi +3 -3
- metaflow-stubs/runner/nbdeploy.pyi +1 -1
- metaflow-stubs/runner/nbrun.pyi +1 -1
- metaflow-stubs/runner/subprocess_manager.pyi +1 -1
- metaflow-stubs/runner/utils.pyi +2 -2
- metaflow-stubs/system/__init__.pyi +1 -1
- metaflow-stubs/system/system_logger.pyi +2 -2
- metaflow-stubs/system/system_monitor.pyi +1 -1
- metaflow-stubs/tagging_util.pyi +1 -1
- metaflow-stubs/tuple_util.pyi +1 -1
- metaflow-stubs/user_configs/__init__.pyi +1 -1
- metaflow-stubs/user_configs/config_decorators.pyi +4 -4
- metaflow-stubs/user_configs/config_options.pyi +3 -3
- metaflow-stubs/user_configs/config_parameters.pyi +5 -5
- {ob_metaflow_stubs-6.0.3.178.dist-info → ob_metaflow_stubs-6.0.3.179rc0.dist-info}/METADATA +1 -1
- ob_metaflow_stubs-6.0.3.179rc0.dist-info/RECORD +220 -0
- ob_metaflow_stubs-6.0.3.178.dist-info/RECORD +0 -215
- {ob_metaflow_stubs-6.0.3.178.dist-info → ob_metaflow_stubs-6.0.3.179rc0.dist-info}/WHEEL +0 -0
- {ob_metaflow_stubs-6.0.3.178.dist-info → ob_metaflow_stubs-6.0.3.179rc0.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
 # MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
-# Generated on 2025-06-13T18:
+# Generated on 2025-06-13T18:47:55.254260 #
 ######################################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import typing
     import datetime
+    import typing
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

@@ -35,17 +35,17 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
 from .user_configs.config_parameters import config_expr as config_expr
 from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
 from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
-from . import tuple_util as tuple_util
 from . import cards as cards
-from . import
+from . import tuple_util as tuple_util
 from . import metaflow_git as metaflow_git
+from . import events as events
 from . import runner as runner
 from . import plugins as plugins
 from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
-from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
 from . import client as client
 from .client.core import namespace as namespace
@@ -78,6 +78,7 @@ from . import system as system
 from . import pylint_wrapper as pylint_wrapper
 from . import cli as cli
 from . import profilers as profilers
+from . import ob_internal as ob_internal

 EXT_PKG: str

@@ -154,193 +155,120 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
|
|
154
155
|
"""
|
155
156
|
...
|
156
157
|
|
157
|
-
|
158
|
-
def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
158
|
+
def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
159
159
|
"""
|
160
|
-
Specifies
|
161
|
-
|
162
|
-
This decorator is useful if this step may hang indefinitely.
|
163
|
-
|
164
|
-
This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
|
165
|
-
A timeout is considered to be an exception thrown by the step. It will cause the step to be
|
166
|
-
retried if needed and the exception will be caught by the `@catch` decorator, if present.
|
167
|
-
|
168
|
-
Note that all the values specified in parameters are added together so if you specify
|
169
|
-
60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
|
160
|
+
Specifies that this step should execute on DGX cloud.
|
170
161
|
|
171
162
|
|
172
163
|
Parameters
|
173
164
|
----------
|
174
|
-
|
175
|
-
Number of
|
176
|
-
|
177
|
-
|
178
|
-
|
179
|
-
|
165
|
+
gpu : int
|
166
|
+
Number of GPUs to use.
|
167
|
+
gpu_type : str
|
168
|
+
Type of Nvidia GPU to use.
|
169
|
+
queue_timeout : int
|
170
|
+
Time to keep the job in NVCF's queue.
|
180
171
|
"""
|
181
172
|
...
|
182
173
|
|
183
174
|
@typing.overload
|
184
|
-
def
|
185
|
-
...
|
186
|
-
|
187
|
-
@typing.overload
|
188
|
-
def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
189
|
-
...
|
190
|
-
|
191
|
-
def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
|
175
|
+
def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
192
176
|
"""
|
193
|
-
Specifies
|
194
|
-
|
195
|
-
This decorator is useful if this step may hang indefinitely.
|
196
|
-
|
197
|
-
This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
|
198
|
-
A timeout is considered to be an exception thrown by the step. It will cause the step to be
|
199
|
-
retried if needed and the exception will be caught by the `@catch` decorator, if present.
|
177
|
+
Specifies that the step will success under all circumstances.
|
200
178
|
|
201
|
-
|
202
|
-
|
179
|
+
The decorator will create an optional artifact, specified by `var`, which
|
180
|
+
contains the exception raised. You can use it to detect the presence
|
181
|
+
of errors, indicating that all happy-path artifacts produced by the step
|
182
|
+
are missing.
|
203
183
|
|
204
184
|
|
205
185
|
Parameters
|
206
186
|
----------
|
207
|
-
|
208
|
-
|
209
|
-
|
210
|
-
|
211
|
-
|
212
|
-
|
187
|
+
var : str, optional, default None
|
188
|
+
Name of the artifact in which to store the caught exception.
|
189
|
+
If not specified, the exception is not stored.
|
190
|
+
print_exception : bool, default True
|
191
|
+
Determines whether or not the exception is printed to
|
192
|
+
stdout when caught.
|
213
193
|
"""
|
214
194
|
...
|
215
195
|
|
216
196
|
@typing.overload
|
217
|
-
def
|
218
|
-
"""
|
219
|
-
Internal decorator to support Fast bakery
|
220
|
-
"""
|
197
|
+
def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
221
198
|
...
|
222
199
|
|
223
200
|
@typing.overload
|
224
|
-
def
|
201
|
+
def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
225
202
|
...
|
226
203
|
|
227
|
-
def
|
204
|
+
def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
|
228
205
|
"""
|
229
|
-
|
206
|
+
Specifies that the step will success under all circumstances.
|
207
|
+
|
208
|
+
The decorator will create an optional artifact, specified by `var`, which
|
209
|
+
contains the exception raised. You can use it to detect the presence
|
210
|
+
of errors, indicating that all happy-path artifacts produced by the step
|
211
|
+
are missing.
|
212
|
+
|
213
|
+
|
214
|
+
Parameters
|
215
|
+
----------
|
216
|
+
var : str, optional, default None
|
217
|
+
Name of the artifact in which to store the caught exception.
|
218
|
+
If not specified, the exception is not stored.
|
219
|
+
print_exception : bool, default True
|
220
|
+
Determines whether or not the exception is printed to
|
221
|
+
stdout when caught.
|
230
222
|
"""
|
231
223
|
...
|
232
224
|
|
233
225
|
@typing.overload
|
234
|
-
def
|
226
|
+
def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
235
227
|
"""
|
236
|
-
Specifies
|
237
|
-
|
238
|
-
Use `@resources` to specify the resource requirements
|
239
|
-
independently of the specific compute layer (`@batch`, `@kubernetes`).
|
240
|
-
|
241
|
-
You can choose the compute layer on the command line by executing e.g.
|
242
|
-
```
|
243
|
-
python myflow.py run --with batch
|
244
|
-
```
|
245
|
-
or
|
246
|
-
```
|
247
|
-
python myflow.py run --with kubernetes
|
248
|
-
```
|
249
|
-
which executes the flow on the desired system using the
|
250
|
-
requirements specified in `@resources`.
|
228
|
+
Specifies environment variables to be set prior to the execution of a step.
|
251
229
|
|
252
230
|
|
253
231
|
Parameters
|
254
232
|
----------
|
255
|
-
|
256
|
-
|
257
|
-
gpu : int, optional, default None
|
258
|
-
Number of GPUs required for this step.
|
259
|
-
disk : int, optional, default None
|
260
|
-
Disk size (in MB) required for this step. Only applies on Kubernetes.
|
261
|
-
memory : int, default 4096
|
262
|
-
Memory size (in MB) required for this step.
|
263
|
-
shared_memory : int, optional, default None
|
264
|
-
The value for the size (in MiB) of the /dev/shm volume for this step.
|
265
|
-
This parameter maps to the `--shm-size` option in Docker.
|
233
|
+
vars : Dict[str, str], default {}
|
234
|
+
Dictionary of environment variables to set.
|
266
235
|
"""
|
267
236
|
...
|
268
237
|
|
269
238
|
@typing.overload
|
270
|
-
def
|
239
|
+
def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
271
240
|
...
|
272
241
|
|
273
242
|
@typing.overload
|
274
|
-
def
|
243
|
+
def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
275
244
|
...
|
276
245
|
|
277
|
-
def
|
246
|
+
def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
|
278
247
|
"""
|
279
|
-
Specifies
|
280
|
-
|
281
|
-
Use `@resources` to specify the resource requirements
|
282
|
-
independently of the specific compute layer (`@batch`, `@kubernetes`).
|
283
|
-
|
284
|
-
You can choose the compute layer on the command line by executing e.g.
|
285
|
-
```
|
286
|
-
python myflow.py run --with batch
|
287
|
-
```
|
288
|
-
or
|
289
|
-
```
|
290
|
-
python myflow.py run --with kubernetes
|
291
|
-
```
|
292
|
-
which executes the flow on the desired system using the
|
293
|
-
requirements specified in `@resources`.
|
248
|
+
Specifies environment variables to be set prior to the execution of a step.
|
294
249
|
|
295
250
|
|
296
251
|
Parameters
|
297
252
|
----------
|
298
|
-
|
299
|
-
|
300
|
-
gpu : int, optional, default None
|
301
|
-
Number of GPUs required for this step.
|
302
|
-
disk : int, optional, default None
|
303
|
-
Disk size (in MB) required for this step. Only applies on Kubernetes.
|
304
|
-
memory : int, default 4096
|
305
|
-
Memory size (in MB) required for this step.
|
306
|
-
shared_memory : int, optional, default None
|
307
|
-
The value for the size (in MiB) of the /dev/shm volume for this step.
|
308
|
-
This parameter maps to the `--shm-size` option in Docker.
|
253
|
+
vars : Dict[str, str], default {}
|
254
|
+
Dictionary of environment variables to set.
|
309
255
|
"""
|
310
256
|
...
|
311
257
|
|
312
258
|
@typing.overload
|
313
|
-
def
|
259
|
+
def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
314
260
|
"""
|
315
|
-
|
316
|
-
the execution of a step.
|
317
|
-
|
318
|
-
|
319
|
-
Parameters
|
320
|
-
----------
|
321
|
-
sources : List[Union[str, Dict[str, Any]]], default: []
|
322
|
-
List of secret specs, defining how the secrets are to be retrieved
|
261
|
+
Internal decorator to support Fast bakery
|
323
262
|
"""
|
324
263
|
...
|
325
264
|
|
326
265
|
@typing.overload
|
327
|
-
def
|
328
|
-
...
|
329
|
-
|
330
|
-
@typing.overload
|
331
|
-
def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
266
|
+
def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
332
267
|
...
|
333
268
|
|
334
|
-
def
|
269
|
+
def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
|
335
270
|
"""
|
336
|
-
|
337
|
-
the execution of a step.
|
338
|
-
|
339
|
-
|
340
|
-
Parameters
|
341
|
-
----------
|
342
|
-
sources : List[Union[str, Dict[str, Any]]], default: []
|
343
|
-
List of secret specs, defining how the secrets are to be retrieved
|
271
|
+
Internal decorator to support Fast bakery
|
344
272
|
"""
|
345
273
|
...
|
346
274
|
|
@@ -401,44 +329,205 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
|
|
401
329
|
"""
|
402
330
|
...
|
403
331
|
|
404
|
-
|
332
|
+
@typing.overload
|
333
|
+
def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
405
334
|
"""
|
406
|
-
Specifies
|
407
|
-
|
335
|
+
Specifies the Conda environment for the step.
|
336
|
+
|
337
|
+
Information in this decorator will augment any
|
338
|
+
attributes set in the `@conda_base` flow-level decorator. Hence,
|
339
|
+
you can use `@conda_base` to set packages required by all
|
340
|
+
steps and use `@conda` to specify step-specific overrides.
|
408
341
|
|
409
342
|
|
410
343
|
Parameters
|
411
344
|
----------
|
412
|
-
|
413
|
-
|
414
|
-
|
415
|
-
|
345
|
+
packages : Dict[str, str], default {}
|
346
|
+
Packages to use for this step. The key is the name of the package
|
347
|
+
and the value is the version to use.
|
348
|
+
libraries : Dict[str, str], default {}
|
349
|
+
Supported for backward compatibility. When used with packages, packages will take precedence.
|
350
|
+
python : str, optional, default None
|
351
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
352
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
353
|
+
disabled : bool, default False
|
354
|
+
If set to True, disables @conda.
|
416
355
|
"""
|
417
356
|
...
|
418
357
|
|
419
|
-
|
358
|
+
@typing.overload
|
359
|
+
def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
360
|
+
...
|
361
|
+
|
362
|
+
@typing.overload
|
363
|
+
def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
364
|
+
...
|
365
|
+
|
366
|
+
def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
|
420
367
|
"""
|
421
|
-
Specifies
|
368
|
+
Specifies the Conda environment for the step.
|
369
|
+
|
370
|
+
Information in this decorator will augment any
|
371
|
+
attributes set in the `@conda_base` flow-level decorator. Hence,
|
372
|
+
you can use `@conda_base` to set packages required by all
|
373
|
+
steps and use `@conda` to specify step-specific overrides.
|
422
374
|
|
423
375
|
|
424
376
|
Parameters
|
425
377
|
----------
|
426
|
-
|
427
|
-
|
428
|
-
|
429
|
-
|
430
|
-
|
431
|
-
|
378
|
+
packages : Dict[str, str], default {}
|
379
|
+
Packages to use for this step. The key is the name of the package
|
380
|
+
and the value is the version to use.
|
381
|
+
libraries : Dict[str, str], default {}
|
382
|
+
Supported for backward compatibility. When used with packages, packages will take precedence.
|
383
|
+
python : str, optional, default None
|
384
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
385
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
386
|
+
disabled : bool, default False
|
387
|
+
If set to True, disables @conda.
|
432
388
|
"""
|
433
389
|
...
|
434
390
|
|
435
|
-
|
391
|
+
@typing.overload
|
392
|
+
def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
436
393
|
"""
|
437
|
-
|
394
|
+
Specifies secrets to be retrieved and injected as environment variables prior to
|
395
|
+
the execution of a step.
|
438
396
|
|
439
|
-
|
440
|
-
|
441
|
-
|
397
|
+
|
398
|
+
Parameters
|
399
|
+
----------
|
400
|
+
sources : List[Union[str, Dict[str, Any]]], default: []
|
401
|
+
List of secret specs, defining how the secrets are to be retrieved
|
402
|
+
"""
|
403
|
+
...
|
404
|
+
|
405
|
+
@typing.overload
|
406
|
+
def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
407
|
+
...
|
408
|
+
|
409
|
+
@typing.overload
|
410
|
+
def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
411
|
+
...
|
412
|
+
|
413
|
+
def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
|
414
|
+
"""
|
415
|
+
Specifies secrets to be retrieved and injected as environment variables prior to
|
416
|
+
the execution of a step.
|
417
|
+
|
418
|
+
|
419
|
+
Parameters
|
420
|
+
----------
|
421
|
+
sources : List[Union[str, Dict[str, Any]]], default: []
|
422
|
+
List of secret specs, defining how the secrets are to be retrieved
|
423
|
+
"""
|
424
|
+
...
|
425
|
+
|
426
|
+
@typing.overload
|
427
|
+
def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
428
|
+
"""
|
429
|
+
Decorator prototype for all step decorators. This function gets specialized
|
430
|
+
and imported for all decorators types by _import_plugin_decorators().
|
431
|
+
"""
|
432
|
+
...
|
433
|
+
|
434
|
+
@typing.overload
|
435
|
+
def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
436
|
+
...
|
437
|
+
|
438
|
+
def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
|
439
|
+
"""
|
440
|
+
Decorator prototype for all step decorators. This function gets specialized
|
441
|
+
and imported for all decorators types by _import_plugin_decorators().
|
442
|
+
"""
|
443
|
+
...
|
444
|
+
|
445
|
+
@typing.overload
|
446
|
+
def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
447
|
+
"""
|
448
|
+
Specifies the resources needed when executing this step.
|
449
|
+
|
450
|
+
Use `@resources` to specify the resource requirements
|
451
|
+
independently of the specific compute layer (`@batch`, `@kubernetes`).
|
452
|
+
|
453
|
+
You can choose the compute layer on the command line by executing e.g.
|
454
|
+
```
|
455
|
+
python myflow.py run --with batch
|
456
|
+
```
|
457
|
+
or
|
458
|
+
```
|
459
|
+
python myflow.py run --with kubernetes
|
460
|
+
```
|
461
|
+
which executes the flow on the desired system using the
|
462
|
+
requirements specified in `@resources`.
|
463
|
+
|
464
|
+
|
465
|
+
Parameters
|
466
|
+
----------
|
467
|
+
cpu : int, default 1
|
468
|
+
Number of CPUs required for this step.
|
469
|
+
gpu : int, optional, default None
|
470
|
+
Number of GPUs required for this step.
|
471
|
+
disk : int, optional, default None
|
472
|
+
Disk size (in MB) required for this step. Only applies on Kubernetes.
|
473
|
+
memory : int, default 4096
|
474
|
+
Memory size (in MB) required for this step.
|
475
|
+
shared_memory : int, optional, default None
|
476
|
+
The value for the size (in MiB) of the /dev/shm volume for this step.
|
477
|
+
This parameter maps to the `--shm-size` option in Docker.
|
478
|
+
"""
|
479
|
+
...
|
480
|
+
|
481
|
+
@typing.overload
|
482
|
+
def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
483
|
+
...
|
484
|
+
|
485
|
+
@typing.overload
|
486
|
+
def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
487
|
+
...
|
488
|
+
|
489
|
+
def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
|
490
|
+
"""
|
491
|
+
Specifies the resources needed when executing this step.
|
492
|
+
|
493
|
+
Use `@resources` to specify the resource requirements
|
494
|
+
independently of the specific compute layer (`@batch`, `@kubernetes`).
|
495
|
+
|
496
|
+
You can choose the compute layer on the command line by executing e.g.
|
497
|
+
```
|
498
|
+
python myflow.py run --with batch
|
499
|
+
```
|
500
|
+
or
|
501
|
+
```
|
502
|
+
python myflow.py run --with kubernetes
|
503
|
+
```
|
504
|
+
which executes the flow on the desired system using the
|
505
|
+
requirements specified in `@resources`.
|
506
|
+
|
507
|
+
|
508
|
+
Parameters
|
509
|
+
----------
|
510
|
+
cpu : int, default 1
|
511
|
+
Number of CPUs required for this step.
|
512
|
+
gpu : int, optional, default None
|
513
|
+
Number of GPUs required for this step.
|
514
|
+
disk : int, optional, default None
|
515
|
+
Disk size (in MB) required for this step. Only applies on Kubernetes.
|
516
|
+
memory : int, default 4096
|
517
|
+
Memory size (in MB) required for this step.
|
518
|
+
shared_memory : int, optional, default None
|
519
|
+
The value for the size (in MiB) of the /dev/shm volume for this step.
|
520
|
+
This parameter maps to the `--shm-size` option in Docker.
|
521
|
+
"""
|
522
|
+
...
|
523
|
+
|
524
|
+
def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
525
|
+
"""
|
526
|
+
This decorator is used to run Ollama APIs as Metaflow task sidecars.
|
527
|
+
|
528
|
+
User code call
|
529
|
+
--------------
|
530
|
+
@ollama(
|
442
531
|
models=[...],
|
443
532
|
...
|
444
533
|
)
|
@@ -475,6 +564,31 @@ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy:
|
|
475
564
|
"""
|
476
565
|
...
|
477
566
|
|
567
|
+
def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
568
|
+
"""
|
569
|
+
Decorator that helps cache, version and store models/datasets from huggingface hub.
|
570
|
+
|
571
|
+
|
572
|
+
Parameters
|
573
|
+
----------
|
574
|
+
temp_dir_root : str, optional
|
575
|
+
The root directory that will hold the temporary directory where objects will be downloaded.
|
576
|
+
|
577
|
+
load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
|
578
|
+
The list of repos (models/datasets) to load.
|
579
|
+
|
580
|
+
Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
|
581
|
+
|
582
|
+
- If repo (model/dataset) is not found in the datastore:
|
583
|
+
- Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
|
584
|
+
- Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
|
585
|
+
- All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
|
586
|
+
|
587
|
+
- If repo is found in the datastore:
|
588
|
+
- Loads it directly from datastore to local path (can be temporary directory or specified path)
|
589
|
+
"""
|
590
|
+
...
|
591
|
+
|
478
592
|
@typing.overload
|
479
593
|
def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
480
594
|
"""
|
@@ -524,132 +638,128 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
|
|
524
638
|
"""
|
525
639
|
...
|
526
640
|
|
527
|
-
def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
528
|
-
"""
|
529
|
-
Decorator that helps cache, version and store models/datasets from huggingface hub.
|
530
|
-
|
531
|
-
|
532
|
-
Parameters
|
533
|
-
----------
|
534
|
-
temp_dir_root : str, optional
|
535
|
-
The root directory that will hold the temporary directory where objects will be downloaded.
|
536
|
-
|
537
|
-
load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
|
538
|
-
The list of repos (models/datasets) to load.
|
539
|
-
|
540
|
-
Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
|
541
|
-
|
542
|
-
- If repo (model/dataset) is not found in the datastore:
|
543
|
-
- Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
|
544
|
-
- Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
|
545
|
-
- All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
|
546
|
-
|
547
|
-
- If repo is found in the datastore:
|
548
|
-
- Loads it directly from datastore to local path (can be temporary directory or specified path)
|
549
|
-
"""
|
550
|
-
...
|
551
|
-
|
552
641
|
@typing.overload
|
553
|
-
def
|
642
|
+
def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
554
643
|
"""
|
555
|
-
Specifies
|
644
|
+
Specifies the PyPI packages for the step.
|
645
|
+
|
646
|
+
Information in this decorator will augment any
|
647
|
+
attributes set in the `@pyi_base` flow-level decorator. Hence,
|
648
|
+
you can use `@pypi_base` to set packages required by all
|
649
|
+
steps and use `@pypi` to specify step-specific overrides.
|
556
650
|
|
557
651
|
|
558
652
|
Parameters
|
559
653
|
----------
|
560
|
-
|
561
|
-
|
654
|
+
packages : Dict[str, str], default: {}
|
655
|
+
Packages to use for this step. The key is the name of the package
|
656
|
+
and the value is the version to use.
|
657
|
+
python : str, optional, default: None
|
658
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
659
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
562
660
|
"""
|
563
661
|
...
|
564
662
|
|
565
663
|
@typing.overload
|
566
|
-
def
|
664
|
+
def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
567
665
|
...
|
568
666
|
|
569
667
|
@typing.overload
|
570
|
-
def
|
668
|
+
def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
571
669
|
...
|
572
670
|
|
573
|
-
def
|
671
|
+
def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
|
574
672
|
"""
|
575
|
-
Specifies
|
673
|
+
Specifies the PyPI packages for the step.
|
674
|
+
|
675
|
+
Information in this decorator will augment any
|
676
|
+
attributes set in the `@pyi_base` flow-level decorator. Hence,
|
677
|
+
you can use `@pypi_base` to set packages required by all
|
678
|
+
steps and use `@pypi` to specify step-specific overrides.
|
576
679
|
|
577
680
|
|
578
681
|
Parameters
|
579
682
|
----------
|
580
|
-
|
581
|
-
|
683
|
+
packages : Dict[str, str], default: {}
|
684
|
+
Packages to use for this step. The key is the name of the package
|
685
|
+
and the value is the version to use.
|
686
|
+
python : str, optional, default: None
|
687
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
688
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
582
689
|
"""
|
583
690
|
...
|
584
691
|
|
585
|
-
|
586
|
-
def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
692
|
+
def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
587
693
|
"""
|
588
|
-
|
589
|
-
|
694
|
+
Specifies that this step is used to deploy an instance of the app.
|
695
|
+
Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
|
590
696
|
|
591
697
|
|
592
698
|
Parameters
|
593
699
|
----------
|
594
|
-
|
595
|
-
|
596
|
-
|
597
|
-
|
598
|
-
will be loaded at the start of the task.
|
599
|
-
- "none": Do not load any checkpoint
|
600
|
-
- "fresh": Loads the lastest checkpoint created within the running Task.
|
601
|
-
This mode helps loading checkpoints across various retry attempts of the same task.
|
602
|
-
With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
|
603
|
-
created within the task will be loaded when the task is retries execution on failure.
|
604
|
-
|
605
|
-
temp_dir_root : str, default: None
|
606
|
-
The root directory under which `current.checkpoint.directory` will be created.
|
700
|
+
app_port : int
|
701
|
+
Number of GPUs to use.
|
702
|
+
app_name : str
|
703
|
+
Name of the app to deploy.
|
607
704
|
"""
|
608
705
|
...
|
609
706
|
|
610
707
|
@typing.overload
|
611
|
-
def
|
612
|
-
...
|
613
|
-
|
614
|
-
@typing.overload
|
615
|
-
def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
616
|
-
...
|
617
|
-
|
618
|
-
def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
|
708
|
+
def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
619
709
|
"""
|
620
|
-
|
710
|
+
Specifies a timeout for your step.
|
621
711
|
|
712
|
+
This decorator is useful if this step may hang indefinitely.
|
713
|
+
|
714
|
+
This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
|
715
|
+
A timeout is considered to be an exception thrown by the step. It will cause the step to be
|
716
|
+
retried if needed and the exception will be caught by the `@catch` decorator, if present.
|
622
717
|
|
718
|
+
Note that all the values specified in parameters are added together so if you specify
|
719
|
+
60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
|
623
720
|
|
624
|
-
Parameters
|
625
|
-
----------
|
626
|
-
load_policy : str, default: "fresh"
|
627
|
-
The policy for loading the checkpoint. The following policies are supported:
|
628
|
-
- "eager": Loads the the latest available checkpoint within the namespace.
|
629
|
-
With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
|
630
|
-
will be loaded at the start of the task.
|
631
|
-
- "none": Do not load any checkpoint
|
632
|
-
- "fresh": Loads the lastest checkpoint created within the running Task.
|
633
|
-
This mode helps loading checkpoints across various retry attempts of the same task.
|
634
|
-
With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
|
635
|
-
created within the task will be loaded when the task is retries execution on failure.
|
636
721
|
|
637
|
-
|
638
|
-
|
722
|
+
Parameters
|
723
|
+
----------
|
724
|
+
seconds : int, default 0
|
725
|
+
Number of seconds to wait prior to timing out.
|
726
|
+
minutes : int, default 0
|
727
|
+
Number of minutes to wait prior to timing out.
|
728
|
+
hours : int, default 0
|
729
|
+
Number of hours to wait prior to timing out.
|
639
730
|
"""
|
640
731
|
...
|
641
732
|
|
642
|
-
|
733
|
+
@typing.overload
|
734
|
+
def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
735
|
+
...
|
736
|
+
|
737
|
+
@typing.overload
|
738
|
+
def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
739
|
+
...
|
740
|
+
|
741
|
+
def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
|
643
742
|
"""
|
644
|
-
Specifies
|
743
|
+
Specifies a timeout for your step.
|
744
|
+
|
745
|
+
This decorator is useful if this step may hang indefinitely.
|
746
|
+
|
747
|
+
This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
|
748
|
+
A timeout is considered to be an exception thrown by the step. It will cause the step to be
|
749
|
+
retried if needed and the exception will be caught by the `@catch` decorator, if present.
|
750
|
+
|
751
|
+
Note that all the values specified in parameters are added together so if you specify
|
752
|
+
60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
|
645
753
|
|
646
754
|
|
647
755
|
Parameters
|
648
756
|
----------
|
649
|
-
|
650
|
-
Number of
|
651
|
-
|
652
|
-
|
757
|
+
seconds : int, default 0
|
758
|
+
Number of seconds to wait prior to timing out.
|
759
|
+
minutes : int, default 0
|
760
|
+
Number of minutes to wait prior to timing out.
|
761
|
+
hours : int, default 0
|
762
|
+
Number of hours to wait prior to timing out.
|
653
763
|
"""
|
654
764
|
...
|
655
765
|
|
@@ -708,127 +818,6 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
  def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on Kubernetes.
@@ -919,216 +908,166 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  ...

  @typing.overload
- def
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
-
- `@checkpoint`/`@model`/`@huggingface_hub` decorators.
-
- This decorator is useful when users wish to save data to a different datastore
- than what is configured in Metaflow. This can be for variety of reasons:
-
- 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
- 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
- - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
- 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
- - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
-
- Usage:
- ----------
-
- - Using a custom IAM role to access the datastore.
-
- ```python
- @with_artifact_store(
- type="s3",
- config=lambda: {
- "root": "s3://my-bucket-foo/path/to/root",
- "role_arn": ROLE,
- },
- )
- class MyFlow(FlowSpec):
-
- @checkpoint
- @step
- def start(self):
- with open("my_file.txt", "w") as f:
- f.write("Hello, World!")
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
- self.next(self.end)
-
- ```
+ Enables checkpointing for a step.

- - Using credentials to access the s3-compatible datastore.

- ```python
- @with_artifact_store(
- type="s3",
- config=lambda: {
- "root": "s3://my-bucket-foo/path/to/root",
- "client_params": {
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
- },
- },
- )
- class MyFlow(FlowSpec):

-
-
-
-
-
-
-
+ Parameters
+ ----------
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.

-
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
+ """
+ ...
+
+ @typing.overload
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
+ """
+ Enables checkpointing for a step.

- - Accessing objects stored in external datastores after task execution.

- ```python
- run = Run("CheckpointsTestsFlow/8992")
- with artifact_store_from(run=run, config={
- "client_params": {
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
- },
- }):
- with Checkpoint() as cp:
- latest = cp.list(
- task=run["start"].task
- )[0]
- print(latest)
- cp.load(
- latest,
- "test-checkpoints"
- )

-
- with artifact_store_from(run=run, config={
- "client_params": {
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
- },
- }):
- load_model(
- task.data.model_ref,
- "test-models"
- )
- ```
- Parameters:
+ Parameters
  ----------
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.

-
- The
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
+ """
+ ...
+
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on DGX cloud.

-
-
-
-
-
-
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
+
+ Parameters
+ ----------
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
  """
  ...

  @typing.overload
- def
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
-
-
-
-
-
-
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...

  @typing.overload
- def
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
-
-
-
-
-
-
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...

@@ -1176,53 +1115,43 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  ...

  @typing.overload
- def
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
-
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
  """
  ...

  @typing.overload
- def
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
-
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
  """
  ...

@@ -1269,6 +1198,41 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...

+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
+ """
+ ...
+
  @typing.overload
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1320,38 +1284,117 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...

- def
+ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
  """
-
+ Allows setting external datastores to save data for the
+ `@checkpoint`/`@model`/`@huggingface_hub` decorators.

-
-
+ This decorator is useful when users wish to save data to a different datastore
+ than what is configured in Metaflow. This can be for variety of reasons:

+ 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
+ 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
+ - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
+ 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
+ - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.

-
+ Usage:
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.

-
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
+ - Using a custom IAM role to access the datastore.

-
-
-
-
-
-
-
-
-
-
-
+ ```python
+ @with_artifact_store(
+ type="s3",
+ config=lambda: {
+ "root": "s3://my-bucket-foo/path/to/root",
+ "role_arn": ROLE,
+ },
+ )
+ class MyFlow(FlowSpec):
+
+ @checkpoint
+ @step
+ def start(self):
+ with open("my_file.txt", "w") as f:
+ f.write("Hello, World!")
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
+ self.next(self.end)
+
+ ```
+
+ - Using credentials to access the s3-compatible datastore.
+
+ ```python
+ @with_artifact_store(
+ type="s3",
+ config=lambda: {
+ "root": "s3://my-bucket-foo/path/to/root",
+ "client_params": {
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
+ },
+ },
+ )
+ class MyFlow(FlowSpec):
+
+ @checkpoint
+ @step
+ def start(self):
+ with open("my_file.txt", "w") as f:
+ f.write("Hello, World!")
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
+ self.next(self.end)
+
+ ```
+
+ - Accessing objects stored in external datastores after task execution.
+
+ ```python
+ run = Run("CheckpointsTestsFlow/8992")
+ with artifact_store_from(run=run, config={
+ "client_params": {
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
+ },
+ }):
+ with Checkpoint() as cp:
+ latest = cp.list(
+ task=run["start"].task
+ )[0]
+ print(latest)
+ cp.load(
+ latest,
+ "test-checkpoints"
+ )
+
+ task = Task("TorchTuneFlow/8484/train/53673")
+ with artifact_store_from(run=run, config={
+ "client_params": {
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
+ },
+ }):
+ load_model(
+ task.data.model_ref,
+ "test-models"
+ )
+ ```
+ Parameters:
+ ----------
+
+ type: str
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
+
+ config: dict or Callable
+ Dictionary of configuration options for the datastore. The following keys are required:
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
+ - example: 's3://bucket-name/path/to/root'
+ - example: 'gs://bucket-name/path/to/root'
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
  """
  ...

@@ -1457,95 +1500,53 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  ...

  @typing.overload
- def
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
+ Specifies the Conda environment for all steps of the flow.

-
-
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.


  Parameters
  ----------
-
-
-
-
-
-
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...

  @typing.overload
- def
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
+ Specifies the Conda environment for all steps of the flow.

-
-
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.


  Parameters
  ----------
-
-
-
-
-
-
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...