ob-metaflow-stubs 6.0.3.180rc5__py2.py3-none-any.whl → 6.0.3.182__py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- metaflow-stubs/__init__.pyi +665 -666
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +6 -6
- metaflow-stubs/client/filecache.pyi +3 -3
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +3 -3
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -4
- metaflow-stubs/metaflow_current.pyi +110 -110
- metaflow-stubs/metaflow_git.pyi +2 -2
- metaflow-stubs/mf_extensions/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +5 -5
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +5 -5
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +13 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -3
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +9 -9
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +6 -6
- metaflow-stubs/plugins/cards/card_client.pyi +2 -2
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -4
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/perimeters.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
- metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
- metaflow-stubs/profilers/__init__.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +29 -29
- metaflow-stubs/runner/deployer_impl.pyi +3 -3
- metaflow-stubs/runner/metaflow_runner.pyi +3 -3
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +3 -3
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +3 -3
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_decorators.pyi +5 -5
- metaflow-stubs/user_configs/config_options.pyi +3 -3
- metaflow-stubs/user_configs/config_parameters.pyi +7 -7
- {ob_metaflow_stubs-6.0.3.180rc5.dist-info → ob_metaflow_stubs-6.0.3.182.dist-info}/METADATA +1 -1
- ob_metaflow_stubs-6.0.3.182.dist-info/RECORD +215 -0
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +0 -6
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +0 -51
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +0 -65
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +0 -74
- metaflow-stubs/ob_internal.pyi +0 -11
- ob_metaflow_stubs-6.0.3.180rc5.dist-info/RECORD +0 -220
- {ob_metaflow_stubs-6.0.3.180rc5.dist-info → ob_metaflow_stubs-6.0.3.182.dist-info}/WHEEL +0 -0
- {ob_metaflow_stubs-6.0.3.180rc5.dist-info → ob_metaflow_stubs-6.0.3.182.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.15.
-# Generated on 2025-06-
+# MF version: 2.15.18.1+obcheckpoint(0.2.1);ob(v1) #
+# Generated on 2025-06-19T23:04:39.675489 #
 ######################################################################################################
 
 from __future__ import annotations
@@ -45,8 +45,8 @@ from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package imp
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
 from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
-from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
 from . import client as client
 from .client.core import namespace as namespace
 from .client.core import get_namespace as get_namespace
@@ -78,7 +78,6 @@ from . import system as system
 from . import pylint_wrapper as pylint_wrapper
 from . import cli as cli
 from . import profilers as profilers
-from . import ob_internal as ob_internal
 
 EXT_PKG: str
 
@@ -190,127 +189,91 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     """
     ...
 
-def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on DGX cloud.
-
-
-    Parameters
-    ----------
-    gpu : int
-        Number of GPUs to use.
-    gpu_type : str
-        Type of Nvidia GPU to use.
-    queue_timeout : int
-        Time to keep the job in NVCF's queue.
-    """
-    ...
-
 @typing.overload
-def
+def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Creates a human-readable report, a Metaflow Card, after this step completes.
 
-
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Note that you may add multiple `@card` decorators in a step with different parameters.
 
 
     Parameters
     ----------
-
-
-
-
-
-
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
     """
     ...
 
 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
     """
-
+    Creates a human-readable report, a Metaflow Card, after this step completes.
 
-
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Note that you may add multiple `@card` decorators in a step with different parameters.
 
 
     Parameters
     ----------
-
-
-
-
-
-
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
    """
     ...
 
-
-def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-
+    Decorator that helps cache, version and store models/datasets from huggingface hub.
 
 
     Parameters
     ----------
-
-
-    These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
-        - `current.checkpoint`
-        - `current.model`
-        - `current.huggingface_hub`
+    temp_dir_root : str, optional
+        The root directory that will hold the temporary directory where objects will be downloaded.
 
-
-
-    If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+    load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
+        The list of repos (models/datasets) to load.
 
-
-
+        Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
+
+        - If repo (model/dataset) is not found in the datastore:
+            - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
+            - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
+                - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
+
+        - If repo is found in the datastore:
+            - Loads it directly from datastore to local path (can be temporary directory or specified path)
     """
     ...
 
-
-def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
+def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-
+    Specifies that this step should execute on DGX cloud.
 
 
     Parameters
     ----------
-
-
-
-
-        - `current.model`
-        - `current.huggingface_hub`
-
-    If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
-    the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
-    If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
-    temp_dir_root : str, default: None
-        The root directory under which `current.model.loaded` will store loaded models
+    gpu : int
+        Number of GPUs to use.
+    gpu_type : str
+        Type of Nvidia GPU to use.
     """
     ...
 
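Note (not part of the diff): the `@card` stub added above is the standard Metaflow card decorator. A minimal usage sketch, assuming ordinary Metaflow card usage via `current.card`; the flow name and card content are illustrative only:

from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown

class CardDemoFlow(FlowSpec):

    @card(type='default', timeout=45)  # keyword arguments mirror the stub signature above
    @step
    def start(self):
        # Components appended to current.card are rendered into the card
        # once the step completes.
        current.card.append(Markdown('# Hello from a Metaflow card'))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    CardDemoFlow()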
@@ -403,58 +366,110 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
     """
     ...
 
-
-def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-    to a step needs to be retried.
+    This decorator is used to run Ollama APIs as Metaflow task sidecars.
 
-
-
-
+    User code call
+    --------------
+    @ollama(
+        models=[...],
+        ...
+    )
 
-
-
-
+    Valid backend options
+    ---------------------
+    - 'local': Run as a separate process on the local task machine.
+    - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+    - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+    Valid model options
+    -------------------
+    Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
 
 
     Parameters
     ----------
-
-
-
-
+    models: list[str]
+        List of Ollama containers running models in sidecars.
+    backend: str
+        Determines where and how to run the Ollama process.
+    force_pull: bool
+        Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+    cache_update_policy: str
+        Cache update policy: "auto", "force", or "never".
+    force_cache_update: bool
+        Simple override for "force" cache update policy.
+    debug: bool
+        Whether to turn on verbose debugging logs.
+    circuit_breaker_config: dict
+        Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+    timeout_config: dict
+        Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
+    """
+    ...
+
+def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step is used to deploy an instance of the app.
+    Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
+
+
+    Parameters
+    ----------
+    app_port : int
+        Number of GPUs to use.
+    app_name : str
+        Name of the app to deploy.
     """
     ...
 
 @typing.overload
-def
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies environment variables to be set prior to the execution of a step.
+
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
     ...
 
 @typing.overload
-def
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
-
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
     """
-    Specifies
-    to a step needs to be retried.
+    Specifies environment variables to be set prior to the execution of a step.
 
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
 
-
-
-
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
+def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on DGX cloud.
 
 
     Parameters
     ----------
-
-        Number of
-
-
+    gpu : int
+        Number of GPUs to use.
+    gpu_type : str
+        Type of Nvidia GPU to use.
+    queue_timeout : int
+        Time to keep the job in NVCF's queue.
     """
     ...
 
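Note (not part of the diff): this hunk relocates `@environment` and adds the Outerbounds `@ollama` sidecar decorator at the module level of the stubs. A hedged usage sketch; the environment variable, model name, and backend value are illustrative, and any of the remaining `@ollama` keyword arguments from the stub signature can be supplied the same way:

from metaflow import FlowSpec, step, environment, ollama

class SidecarDemoFlow(FlowSpec):

    @environment(vars={'OLLAMA_DEBUG': '1'})       # illustrative variable only
    @ollama(models=['llama3.2'], backend='local')  # 'local' is the documented backend option
    @step
    def start(self):
        # While this step runs, the Ollama sidecar serves the listed model(s).
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    SidecarDemoFlow()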
@@ -518,338 +533,110 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     ...
 
 @typing.overload
-def
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the
+    Specifies that the step will success under all circumstances.
 
-
-
-
-
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
-    """
-    ...
-
-@typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
-    """
-    Specifies the Conda environment for the step.
-
-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
-
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
-    """
-    ...
-
-@typing.overload
-def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
-
-    Note that you may add multiple `@card` decorators in a step with different parameters.
-
-
-    Parameters
-    ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
-    """
-    ...
-
-@typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
-    """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
-
-    Note that you may add multiple `@card` decorators in a step with different parameters.
-
-
-    Parameters
-    ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
-    """
-    ...
-
-def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on DGX cloud.
-
-
-    Parameters
-    ----------
-    gpu : int
-        Number of GPUs to use.
-    gpu_type : str
-        Type of Nvidia GPU to use.
-    """
-    ...
-
-@typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...
 
 @typing.overload
-def
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
-    ...
-
-def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-
-def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
     """
-    Specifies the
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
+    Specifies that the step will success under all circumstances.
 
-
-
-
-
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...
 
 @typing.overload
-def
-    ...
-
-@typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
+    Enables loading / saving of models within a step.
 
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    """
-    ...
-
-def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step is used to deploy an instance of the app.
-    Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
+    load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+        Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+        These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+            - `current.checkpoint`
+            - `current.model`
+            - `current.huggingface_hub`
 
+        If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+        the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+        If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
 
-
-    app_port : int
-        Number of GPUs to use.
-    app_name : str
-        Name of the app to deploy.
+    temp_dir_root : str, default: None
+        The root directory under which `current.model.loaded` will store loaded models
     """
     ...
 
 @typing.overload
-def
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
+def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
-    ...
-
-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    This decorator is used to run Ollama APIs as Metaflow task sidecars.
-
-    User code call
-    --------------
-    @ollama(
-        models=[...],
-        ...
-    )
-
-    Valid backend options
-    ---------------------
-    - 'local': Run as a separate process on the local task machine.
-    - (TODO) 'managed': Outerbounds hosts and selects compute provider.
-    - (TODO) 'remote': Spin up separate instance to serve Ollama models.
-
-    Valid model options
-    -------------------
-    Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
-
-
-    Parameters
-    ----------
-    models: list[str]
-        List of Ollama containers running models in sidecars.
-    backend: str
-        Determines where and how to run the Ollama process.
-    force_pull: bool
-        Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
-    cache_update_policy: str
-        Cache update policy: "auto", "force", or "never".
-    force_cache_update: bool
-        Simple override for "force" cache update policy.
-    debug: bool
-        Whether to turn on verbose debugging logs.
-    circuit_breaker_config: dict
-        Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
-    timeout_config: dict
-        Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
-    """
+def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
     """
-
+    Enables loading / saving of models within a step.
 
 
-    Parameters
-    ----------
-    temp_dir_root : str, optional
-        The root directory that will hold the temporary directory where objects will be downloaded.
-
-    load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
-        The list of repos (models/datasets) to load.
 
-
+    Parameters
+    ----------
+    load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+        Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+        These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+            - `current.checkpoint`
+            - `current.model`
+            - `current.huggingface_hub`
 
-
-
-
-            - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
+        If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+        the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+        If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
 
-
-
+    temp_dir_root : str, default: None
+        The root directory under which `current.model.loaded` will store loaded models
     """
     ...
 
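Note (not part of the diff): the `@catch` stub added in this hunk is the standard Metaflow error-handling decorator. A minimal sketch of the documented pattern, with the artifact name and the failing computation chosen purely for illustration:

from metaflow import FlowSpec, step, catch

class CatchDemoFlow(FlowSpec):

    @catch(var='compute_error', print_exception=True)
    @step
    def start(self):
        # If this raises, the exception is stored in self.compute_error and
        # the flow continues instead of failing the run.
        self.result = 1 / 0
        self.next(self.end)

    @step
    def end(self):
        # The presence of the artifact signals that the happy path was skipped.
        if getattr(self, 'compute_error', None):
            print('start step failed:', self.compute_error)

if __name__ == '__main__':
    CatchDemoFlow()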
@@ -911,53 +698,80 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
     ...
 
 @typing.overload
-def
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the Conda environment for the step.
 
     Information in this decorator will augment any
-    attributes set in the `@
-    you can use `@
-    steps and use `@
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
 
 
     Parameters
     ----------
-    packages : Dict[str, str], default
+    packages : Dict[str, str], default {}
         Packages to use for this step. The key is the name of the package
         and the value is the version to use.
-
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
         Version of Python to use, e.g. '3.7.4'. A default value of None implies
         that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...
 
 @typing.overload
-def
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies the
+    Specifies the Conda environment for the step.
 
     Information in this decorator will augment any
-    attributes set in the `@
-    you can use `@
-    steps and use `@
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
 
 
     Parameters
     ----------
-    packages : Dict[str, str], default
+    packages : Dict[str, str], default {}
         Packages to use for this step. The key is the name of the package
         and the value is the version to use.
-
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
        Version of Python to use, e.g. '3.7.4'. A default value of None implies
        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...
 
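Note (not part of the diff): `@conda` and `@conda_base` work together as described in the docstring above; step-level packages override flow-level defaults. A sketch with illustrative package and Python versions:

from metaflow import FlowSpec, step, conda, conda_base

@conda_base(python='3.11.0')              # flow-level default environment
class CondaDemoFlow(FlowSpec):

    @conda(packages={'pandas': '2.2.2'})  # step-specific override; version is illustrative
    @step
    def start(self):
        import pandas as pd
        self.shape = pd.DataFrame({'x': [1, 2, 3]}).shape
        self.next(self.end)

    @step
    def end(self):
        print(self.shape)

if __name__ == '__main__':
    CondaDemoFlow()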
@@ -999,6 +813,57 @@ def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing
  """
  ...

+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
  @typing.overload
  def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
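Not part of the diff: a hedged usage sketch of the newly added `@pypi` stub together with `@pypi_base` (documented further down in this release). The class name and version pins are invented for the example.

```python
# Hypothetical sketch of flow-level PyPI packages with a step-level override,
# following the @pypi docstring added in the hunk above. Pins are placeholders.
from metaflow import FlowSpec, step, pypi, pypi_base


@pypi_base(python="3.11.5", packages={"requests": "2.32.3"})
class PypiExampleFlow(FlowSpec):

    @pypi(packages={"beautifulsoup4": "4.12.3"})  # augments the flow-level set
    @step
    def start(self):
        import requests  # both packages are available in this step's environment
        import bs4
        print(requests.__version__, bs4.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiExampleFlow()
```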
@@ -1017,43 +882,136 @@ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepF
  ...

  @typing.overload
- def
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
-
-
-
-
-
-
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

-
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
-
-
-
-
-
-
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

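Not part of the diff: a minimal sketch of how the `@resources` and `@retry` decorators documented in the hunk above are typically stacked on a step. The CPU/memory sizes, retry counts, and flow name are illustrative assumptions.

```python
# Hypothetical sketch: resource requests plus retries on a single step, per the
# @resources and @retry docstrings above. Values are placeholders, not defaults
# taken from this package.
from metaflow import FlowSpec, step, resources, retry


class ResourcesRetryFlow(FlowSpec):

    @retry(times=2, minutes_between_retries=1)          # re-run transient failures
    @resources(cpu=2, memory=8192, shared_memory=512)    # honored by the chosen compute layer
    @step
    def start(self):
        # The request is independent of the compute layer; pick one at run time,
        # e.g. `python myflow.py run --with kubernetes`.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ResourcesRetryFlow()
```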
@@ -1100,38 +1058,117 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...

- def
+ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
  """
-
+ Allows setting external datastores to save data for the
+ `@checkpoint`/`@model`/`@huggingface_hub` decorators.

-
-
+ This decorator is useful when users wish to save data to a different datastore
+ than what is configured in Metaflow. This can be for variety of reasons:

+ 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
+ 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
+ - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
+ 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
+ - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.

-
+ Usage:
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.

-
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
+ - Using a custom IAM role to access the datastore.

-
-
-
-
-
-
-
-
-
-
-
+ ```python
+ @with_artifact_store(
+ type="s3",
+ config=lambda: {
+ "root": "s3://my-bucket-foo/path/to/root",
+ "role_arn": ROLE,
+ },
+ )
+ class MyFlow(FlowSpec):
+
+ @checkpoint
+ @step
+ def start(self):
+ with open("my_file.txt", "w") as f:
+ f.write("Hello, World!")
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
+ self.next(self.end)
+
+ ```
+
+ - Using credentials to access the s3-compatible datastore.
+
+ ```python
+ @with_artifact_store(
+ type="s3",
+ config=lambda: {
+ "root": "s3://my-bucket-foo/path/to/root",
+ "client_params": {
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
+ },
+ },
+ )
+ class MyFlow(FlowSpec):
+
+ @checkpoint
+ @step
+ def start(self):
+ with open("my_file.txt", "w") as f:
+ f.write("Hello, World!")
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
+ self.next(self.end)
+
+ ```
+
+ - Accessing objects stored in external datastores after task execution.
+
+ ```python
+ run = Run("CheckpointsTestsFlow/8992")
+ with artifact_store_from(run=run, config={
+ "client_params": {
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
+ },
+ }):
+ with Checkpoint() as cp:
+ latest = cp.list(
+ task=run["start"].task
+ )[0]
+ print(latest)
+ cp.load(
+ latest,
+ "test-checkpoints"
+ )
+
+ task = Task("TorchTuneFlow/8484/train/53673")
+ with artifact_store_from(run=run, config={
+ "client_params": {
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
+ },
+ }):
+ load_model(
+ task.data.model_ref,
+ "test-models"
+ )
+ ```
+ Parameters:
+ ----------
+
+ type: str
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
+
+ config: dict or Callable
+ Dictionary of configuration options for the datastore. The following keys are required:
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
+ - example: 's3://bucket-name/path/to/root'
+ - example: 'gs://bucket-name/path/to/root'
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
  """
  ...

@@ -1185,54 +1222,146 @@ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] =
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
+
+ Parameters
+ ----------
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ """
+ ...
+
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
-
-
-
-
-
-
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

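Not part of the diff: a sketch of the flow-level decorators covered by the hunk above (`@trigger_on_finish`, with `@schedule` noted as the cron-based alternative). The flow names, project name, and schedule expression are placeholders, and `@project` is only referenced because the `@trigger_on_finish` docstring above says it is respected.

```python
# Hypothetical sketch: trigger a downstream flow when an upstream flow finishes.
# "FooFlow" and "demo_project" are invented names for illustration.
from metaflow import FlowSpec, step, trigger_on_finish, project


@project(name="demo_project")
@trigger_on_finish(flow="FooFlow")  # start when FooFlow completes in the same namespace
# Alternative, time-based deployment: @schedule(daily=True) or
# @schedule(cron="0 6 * * *", timezone="Europe/Helsinki")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```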
@@ -1329,171 +1458,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
- """
- Allows setting external datastores to save data for the
- `@checkpoint`/`@model`/`@huggingface_hub` decorators.
-
- This decorator is useful when users wish to save data to a different datastore
- than what is configured in Metaflow. This can be for variety of reasons:
-
- 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
- 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
- - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
- 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
- - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
-
- Usage:
- ----------
-
- - Using a custom IAM role to access the datastore.
-
- ```python
- @with_artifact_store(
- type="s3",
- config=lambda: {
- "root": "s3://my-bucket-foo/path/to/root",
- "role_arn": ROLE,
- },
- )
- class MyFlow(FlowSpec):
-
- @checkpoint
- @step
- def start(self):
- with open("my_file.txt", "w") as f:
- f.write("Hello, World!")
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
- self.next(self.end)
-
- ```
-
- - Using credentials to access the s3-compatible datastore.
-
- ```python
- @with_artifact_store(
- type="s3",
- config=lambda: {
- "root": "s3://my-bucket-foo/path/to/root",
- "client_params": {
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
- },
- },
- )
- class MyFlow(FlowSpec):
-
- @checkpoint
- @step
- def start(self):
- with open("my_file.txt", "w") as f:
- f.write("Hello, World!")
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
- self.next(self.end)
-
- ```
-
- - Accessing objects stored in external datastores after task execution.
-
- ```python
- run = Run("CheckpointsTestsFlow/8992")
- with artifact_store_from(run=run, config={
- "client_params": {
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
- },
- }):
- with Checkpoint() as cp:
- latest = cp.list(
- task=run["start"].task
- )[0]
- print(latest)
- cp.load(
- latest,
- "test-checkpoints"
- )
-
- task = Task("TorchTuneFlow/8484/train/53673")
- with artifact_store_from(run=run, config={
- "client_params": {
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
- },
- }):
- load_model(
- task.data.model_ref,
- "test-models"
- )
- ```
- Parameters:
- ----------
-
- type: str
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
-
- config: dict or Callable
- Dictionary of configuration options for the datastore. The following keys are required:
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
- - example: 's3://bucket-name/path/to/root'
- - example: 'gs://bucket-name/path/to/root'
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
- """
- ...
-
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1537,6 +1501,41 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...

+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
+ """
+ ...
+
  @typing.overload
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """