ob-metaflow-stubs 6.0.10.3__py2.py3-none-any.whl → 6.0.10.4__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of ob-metaflow-stubs might be problematic.
- metaflow-stubs/__init__.pyi +984 -968
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +6 -6
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +4 -4
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +2 -2
- metaflow-stubs/meta_files.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +2 -2
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +76 -32
- metaflow-stubs/metaflow_git.pyi +2 -2
- metaflow-stubs/mf_extensions/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +91 -7
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +4 -4
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -5
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +5 -5
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/ob_internal.pyi +2 -2
- metaflow-stubs/packaging_sys/__init__.pyi +7 -7
- metaflow-stubs/packaging_sys/backend.pyi +3 -3
- metaflow-stubs/packaging_sys/distribution_support.pyi +5 -5
- metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
- metaflow-stubs/packaging_sys/utils.pyi +2 -2
- metaflow-stubs/packaging_sys/v1.pyi +3 -3
- metaflow-stubs/parameters.pyi +2 -2
- metaflow-stubs/plugins/__init__.pyi +15 -15
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +5 -5
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +5 -5
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +2 -2
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +3 -3
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
- metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +5 -5
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
- metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/perimeters.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
- metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
- metaflow-stubs/plugins/secrets/utils.pyi +2 -2
- metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
- metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
- metaflow-stubs/profilers/__init__.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +33 -33
- metaflow-stubs/runner/deployer_impl.pyi +3 -3
- metaflow-stubs/runner/metaflow_runner.pyi +3 -3
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +3 -3
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +3 -3
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_options.pyi +2 -2
- metaflow-stubs/user_configs/config_parameters.pyi +6 -6
- metaflow-stubs/user_decorators/__init__.pyi +2 -2
- metaflow-stubs/user_decorators/common.pyi +2 -2
- metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
- metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
- metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
- metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
- {ob_metaflow_stubs-6.0.10.3.dist-info → ob_metaflow_stubs-6.0.10.4.dist-info}/METADATA +1 -1
- ob_metaflow_stubs-6.0.10.4.dist-info/RECORD +262 -0
- ob_metaflow_stubs-6.0.10.3.dist-info/RECORD +0 -262
- {ob_metaflow_stubs-6.0.10.3.dist-info → ob_metaflow_stubs-6.0.10.4.dist-info}/WHEEL +0 -0
- {ob_metaflow_stubs-6.0.10.3.dist-info → ob_metaflow_stubs-6.0.10.4.dist-info}/top_level.txt +0 -0
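The per-file changes above can be reproduced locally from the two wheels. A minimal sketch follows, assuming both wheels have already been downloaded into the working directory (for example with `pip download ob-metaflow-stubs==6.0.10.3 --no-deps`); the exact wheel filenames below are assumptions based on standard wheel naming, not part of this listing:

```python
# Minimal sketch: compare one stub file across the two wheel versions.
import difflib
import zipfile

OLD = "ob_metaflow_stubs-6.0.10.3-py2.py3-none-any.whl"  # assumed filename
NEW = "ob_metaflow_stubs-6.0.10.4-py2.py3-none-any.whl"  # assumed filename

def read_member(wheel_path, member):
    # Wheels are plain zip archives, so members can be read directly.
    with zipfile.ZipFile(wheel_path) as zf:
        return zf.read(member).decode("utf-8", errors="replace").splitlines()

old_lines = read_member(OLD, "metaflow-stubs/__init__.pyi")
new_lines = read_member(NEW, "metaflow-stubs/__init__.pyi")

# Print a unified diff similar to the one shown below for __init__.pyi.
for line in difflib.unified_diff(
    old_lines,
    new_lines,
    fromfile="6.0.10.3/metaflow-stubs/__init__.pyi",
    tofile="6.0.10.4/metaflow-stubs/__init__.pyi",
    lineterm="",
):
    print(line)
```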
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.18.5.1+obcheckpoint(0.2.
-# Generated on 2025-09-
+# MF version: 2.18.5.1+obcheckpoint(0.2.6);ob(v1) #
+# Generated on 2025-09-16T23:23:08.891416 #
 ######################################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-import typing
 import datetime
+import typing
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

@@ -40,17 +40,17 @@ from .user_decorators.user_step_decorator import StepMutator as StepMutator
 from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
 from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
 from . import cards as cards
-from . import events as events
-from . import metaflow_git as metaflow_git
 from . import tuple_util as tuple_util
+from . import metaflow_git as metaflow_git
+from . import events as events
 from . import runner as runner
 from . import plugins as plugins
 from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
 from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
-from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
 from . import client as client
 from .client.core import namespace as namespace
 from .client.core import get_namespace as get_namespace
@@ -167,6 +167,63 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
 """
 ...

+@typing.overload
+def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+"""
+CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+It exists to make it easier for users to know that this decorator should only be used with
+a Neo Cloud like CoreWeave.
+"""
+...
+
+@typing.overload
+def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+...
+
+def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+"""
+CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+It exists to make it easier for users to know that this decorator should only be used with
+a Neo Cloud like CoreWeave.
+"""
+...
+
+@typing.overload
+def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+"""
+Internal decorator to support Fast bakery
+"""
+...
+
+@typing.overload
+def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+...
+
+def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+"""
+Internal decorator to support Fast bakery
+"""
+...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+"""
+Decorator prototype for all step decorators. This function gets specialized
+and imported for all decorators types by _import_plugin_decorators().
+"""
+...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+...
+
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+"""
+Decorator prototype for all step decorators. This function gets specialized
+and imported for all decorators types by _import_plugin_decorators().
+"""
+...
+
 @typing.overload
 def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
 """
@@ -222,6 +279,182 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
 """
 ...

+@typing.overload
+def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+"""
+Creates a human-readable report, a Metaflow Card, after this step completes.
+
+Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+Parameters
+----------
+type : str, default 'default'
+Card type.
+id : str, optional, default None
+If multiple cards are present, use this id to identify this card.
+options : Dict[str, Any], default {}
+Options passed to the card. The contents depend on the card type.
+timeout : int, default 45
+Interrupt reporting if it takes more than this many seconds.
+"""
+...
+
+@typing.overload
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+...
+
+@typing.overload
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+...
+
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+"""
+Creates a human-readable report, a Metaflow Card, after this step completes.
+
+Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+Parameters
+----------
+type : str, default 'default'
+Card type.
+id : str, optional, default None
+If multiple cards are present, use this id to identify this card.
+options : Dict[str, Any], default {}
+Options passed to the card. The contents depend on the card type.
+timeout : int, default 45
+Interrupt reporting if it takes more than this many seconds.
+"""
+...
+
+def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+"""
+Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
+
+> Examples
+
+**Usage: creating references to models from the Hugging Face Hub that may be loaded in downstream steps**
+```python
+@huggingface_hub
+@step
+def pull_model_from_huggingface(self):
+# `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
+# and saves it in the backend storage based on the model's `repo_id`. If there exists a model
+# with the same `repo_id` in the backend storage, it will not download the model again. The return
+# value of the function is a reference to the model in the backend storage.
+# This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
+
+self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
+self.llama_model = current.huggingface_hub.snapshot_download(
+repo_id=self.model_id,
+allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
+)
+self.next(self.train)
+```
+
+**Usage: explicitly loading models at runtime from the Hugging Face Hub or from cache (from Metaflow's datastore)**
+```python
+@huggingface_hub
+@step
+def run_training(self):
+# Temporary directory (auto-cleaned on exit)
+with current.huggingface_hub.load(
+repo_id="google-bert/bert-base-uncased",
+allow_patterns=["*.bin"],
+) as local_path:
+# Use files under local_path
+train_model(local_path)
+...
+
+```
+
+**Usage: loading models directly from the Hugging Face Hub or from cache (from Metaflow's datastore)**
+```python
+@huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
+@step
+def pull_model_from_huggingface(self):
+path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
+```
+
+```python
+@huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
+@step
+def finetune_model(self):
+path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
+# path_to_model will be /my-directory
+```
+
+```python
+# Takes all the arguments passed to `snapshot_download`
+# except for `local_dir`
+@huggingface_hub(load=[
+{
+"repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
+},
+{
+"repo_id": "myorg/mistral-lora",
+"repo_type": "model",
+},
+])
+@step
+def finetune_model(self):
+path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
+# path_to_model will be /my-directory
+```
+
+
+Parameters
+----------
+temp_dir_root : str, optional
+The root directory that will hold the temporary directory where objects will be downloaded.
+
+cache_scope : str, optional
+The scope of the cache. Can be `checkpoint` / `flow` / `global`.
+
+- `checkpoint` (default): All repos are stored like objects saved by `@checkpoint`.
+i.e., the cached path is derived from the namespace, flow, step, and Metaflow foreach iteration.
+Any repo downloaded under this scope will only be retrieved from the cache when the step runs under the same namespace in the same flow (at the same foreach index).
+
+- `flow`: All repos are cached under the flow, regardless of namespace.
+i.e., the cached path is derived solely from the flow name.
+When to use this mode:
+- Multiple users are executing the same flow and want shared access to the repos cached by the decorator.
+- Multiple versions of a flow are deployed, all needing access to the same repos cached by the decorator.
+
+- `global`: All repos are cached under a globally static path.
+i.e., the base path of the cache is static and all repos are stored under it.
+When to use this mode:
+- All repos from the Hugging Face Hub need to be shared by users across all flow executions.
+
+Each caching scope comes with its own trade-offs:
+- `checkpoint`:
+- Has explicit control over when caches are populated (controlled by the same flow that has the `@huggingface_hub` decorator) but ends up hitting the Hugging Face Hub more often if there are many users/namespaces/steps.
+- Since objects are written on a `namespace/flow/step` basis, the blast radius of a bad checkpoint is limited to a particular flow in a namespace.
+- `flow`:
+- Has less control over when caches are populated (can be written by any execution instance of a flow from any namespace) but results in more cache hits.
+- The blast radius of a bad checkpoint is limited to all runs of a particular flow.
+- It doesn't promote cache reuse across flows.
+- `global`:
+- Has no control over when caches are populated (can be written by any flow execution) but has the highest cache hit rate.
+- It promotes cache reuse across flows.
+- The blast radius of a bad checkpoint spans every flow that could be using a particular repo.
+
+load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
+The list of repos (models/datasets) to load.
+
+Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
+
+- If repo (model/dataset) is not found in the datastore:
+- Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
+- Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
+- All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
+
+- If repo is found in the datastore:
+- Loads it directly from datastore to local path (can be temporary directory or specified path)
+"""
+...
+
 def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
 """
 This decorator is used to run vllm APIs as Metaflow task sidecars.
@@ -273,28 +506,125 @@ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card
 ...

 @typing.overload
-def
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
 """
-
-
-
+Specifies secrets to be retrieved and injected as environment variables prior to
+the execution of a step.
+
+
+Parameters
+----------
+sources : List[Union[str, Dict[str, Any]]], default: []
+List of secret specs, defining how the secrets are to be retrieved
+role : str, optional, default: None
+Role to use for fetching secrets
 """
 ...

 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
 ...

-
-
-
-
-
-"""
+@typing.overload
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+...
+
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+"""
+Specifies secrets to be retrieved and injected as environment variables prior to
+the execution of a step.
+
+
+Parameters
+----------
+sources : List[Union[str, Dict[str, Any]]], default: []
+List of secret specs, defining how the secrets are to be retrieved
+role : str, optional, default: None
+Role to use for fetching secrets
+"""
+...
+
+def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+"""
+S3 Proxy decorator for routing S3 requests through a local proxy service.
+
+
+Parameters
+----------
+integration_name : str, optional
+Name of the S3 proxy integration. If not specified, will use the only
+available S3 proxy integration in the namespace (fails if multiple exist).
+write_mode : str, optional
+The desired behavior during write operations to target (origin) S3 bucket.
+allowed options are:
+"origin-and-cache" -> write to both the target S3 bucket and local object
+storage
+"origin" -> only write to the target S3 bucket
+"cache" -> only write to the object storage service used for caching
+debug : bool, optional
+Enable debug logging for proxy operations.
+"""
+...
+
+def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+"""
+This decorator is used to run Ollama APIs as Metaflow task sidecars.
+
+User code call
+--------------
+@ollama(
+models=[...],
+...
+)
+
+Valid backend options
+---------------------
+- 'local': Run as a separate process on the local task machine.
+- (TODO) 'managed': Outerbounds hosts and selects compute provider.
+- (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+Valid model options
+-------------------
+Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
+
+
+Parameters
+----------
+models: list[str]
+List of Ollama containers running models in sidecars.
+backend: str
+Determines where and how to run the Ollama process.
+force_pull: bool
+Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+cache_update_policy: str
+Cache update policy: "auto", "force", or "never".
+force_cache_update: bool
+Simple override for "force" cache update policy.
+debug: bool
+Whether to turn on verbose debugging logs.
+circuit_breaker_config: dict
+Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+timeout_config: dict
+Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
+"""
+...
+
+def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+"""
+Specifies that this step should execute on DGX cloud.
+
+
+Parameters
+----------
+gpu : int
+Number of GPUs to use.
+gpu_type : str
+Type of Nvidia GPU to use.
+"""
 ...

 @typing.overload
-def
+def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
 """
 Decorator prototype for all step decorators. This function gets specialized
 and imported for all decorators types by _import_plugin_decorators().
@@ -302,10 +632,10 @@ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Ca
 ...

 @typing.overload
-def
+def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
 ...

-def
+def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
 """
 Decorator prototype for all step decorators. This function gets specialized
 and imported for all decorators types by _import_plugin_decorators().
@@ -313,448 +643,429 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
|
|
|
313
643
|
...
|
|
314
644
|
|
|
315
645
|
@typing.overload
|
|
316
|
-
def
|
|
646
|
+
def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
317
647
|
"""
|
|
318
|
-
|
|
319
|
-
to
|
|
648
|
+
Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
|
|
649
|
+
It exists to make it easier for users to know that this decorator should only be used with
|
|
650
|
+
a Neo Cloud like Nebius.
|
|
320
651
|
"""
|
|
321
652
|
...
|
|
322
653
|
|
|
323
654
|
@typing.overload
|
|
324
|
-
def
|
|
655
|
+
def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
325
656
|
...
|
|
326
657
|
|
|
327
|
-
def
|
|
658
|
+
def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
|
|
328
659
|
"""
|
|
329
|
-
|
|
330
|
-
to
|
|
660
|
+
Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
|
|
661
|
+
It exists to make it easier for users to know that this decorator should only be used with
|
|
662
|
+
a Neo Cloud like Nebius.
|
|
331
663
|
"""
|
|
332
664
|
...
|
|
333
665
|
|
|
334
666
|
@typing.overload
|
|
335
|
-
def
|
|
667
|
+
def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
|
336
668
|
"""
|
|
337
|
-
Specifies
|
|
338
|
-
|
|
339
|
-
This decorator is useful if this step may hang indefinitely.
|
|
340
|
-
|
|
341
|
-
This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
|
|
342
|
-
A timeout is considered to be an exception thrown by the step. It will cause the step to be
|
|
343
|
-
retried if needed and the exception will be caught by the `@catch` decorator, if present.
|
|
669
|
+
Specifies the PyPI packages for the step.
|
|
344
670
|
|
|
345
|
-
|
|
346
|
-
|
|
671
|
+
Information in this decorator will augment any
|
|
672
|
+
attributes set in the `@pyi_base` flow-level decorator. Hence,
|
|
673
|
+
you can use `@pypi_base` to set packages required by all
|
|
674
|
+
steps and use `@pypi` to specify step-specific overrides.
|
|
347
675
|
|
|
348
676
|
|
|
349
677
|
Parameters
|
|
350
678
|
----------
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
679
|
+
packages : Dict[str, str], default: {}
|
|
680
|
+
Packages to use for this step. The key is the name of the package
|
|
681
|
+
and the value is the version to use.
|
|
682
|
+
python : str, optional, default: None
|
|
683
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
|
684
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
|
357
685
|
"""
|
|
358
686
|
...
|
|
359
687
|
|
|
360
688
|
@typing.overload
|
|
361
|
-
def
|
|
689
|
+
def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
362
690
|
...
|
|
363
691
|
|
|
364
692
|
@typing.overload
|
|
365
|
-
def
|
|
693
|
+
def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
366
694
|
...
|
|
367
695
|
|
|
368
|
-
def
|
|
696
|
+
def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
|
|
369
697
|
"""
|
|
370
|
-
Specifies
|
|
371
|
-
|
|
372
|
-
This decorator is useful if this step may hang indefinitely.
|
|
373
|
-
|
|
374
|
-
This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
|
|
375
|
-
A timeout is considered to be an exception thrown by the step. It will cause the step to be
|
|
376
|
-
retried if needed and the exception will be caught by the `@catch` decorator, if present.
|
|
698
|
+
Specifies the PyPI packages for the step.
|
|
377
699
|
|
|
378
|
-
|
|
379
|
-
|
|
700
|
+
Information in this decorator will augment any
|
|
701
|
+
attributes set in the `@pyi_base` flow-level decorator. Hence,
|
|
702
|
+
you can use `@pypi_base` to set packages required by all
|
|
703
|
+
steps and use `@pypi` to specify step-specific overrides.
|
|
380
704
|
|
|
381
705
|
|
|
382
706
|
Parameters
|
|
383
707
|
----------
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
708
|
+
packages : Dict[str, str], default: {}
|
|
709
|
+
Packages to use for this step. The key is the name of the package
|
|
710
|
+
and the value is the version to use.
|
|
711
|
+
python : str, optional, default: None
|
|
712
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
|
713
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
|
390
714
|
"""
|
|
391
715
|
...
|
|
392
716
|
|
|
393
|
-
|
|
717
|
+
@typing.overload
|
|
718
|
+
def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
|
394
719
|
"""
|
|
395
|
-
|
|
720
|
+
Enables checkpointing for a step.
|
|
721
|
+
|
|
722
|
+
> Examples
|
|
723
|
+
|
|
724
|
+
- Saving Checkpoints
|
|
725
|
+
|
|
726
|
+
```python
|
|
727
|
+
@checkpoint
|
|
728
|
+
@step
|
|
729
|
+
def train(self):
|
|
730
|
+
model = create_model(self.parameters, checkpoint_path = None)
|
|
731
|
+
for i in range(self.epochs):
|
|
732
|
+
# some training logic
|
|
733
|
+
loss = model.train(self.dataset)
|
|
734
|
+
if i % 10 == 0:
|
|
735
|
+
model.save(
|
|
736
|
+
current.checkpoint.directory,
|
|
737
|
+
)
|
|
738
|
+
# saves the contents of the `current.checkpoint.directory` as a checkpoint
|
|
739
|
+
# and returns a reference dictionary to the checkpoint saved in the datastore
|
|
740
|
+
self.latest_checkpoint = current.checkpoint.save(
|
|
741
|
+
name="epoch_checkpoint",
|
|
742
|
+
metadata={
|
|
743
|
+
"epoch": i,
|
|
744
|
+
"loss": loss,
|
|
745
|
+
}
|
|
746
|
+
)
|
|
747
|
+
```
|
|
748
|
+
|
|
749
|
+
- Using Loaded Checkpoints
|
|
750
|
+
|
|
751
|
+
```python
|
|
752
|
+
@retry(times=3)
|
|
753
|
+
@checkpoint
|
|
754
|
+
@step
|
|
755
|
+
def train(self):
|
|
756
|
+
# Assume that the task has restarted and the previous attempt of the task
|
|
757
|
+
# saved a checkpoint
|
|
758
|
+
checkpoint_path = None
|
|
759
|
+
if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
|
|
760
|
+
print("Loaded checkpoint from the previous attempt")
|
|
761
|
+
checkpoint_path = current.checkpoint.directory
|
|
762
|
+
|
|
763
|
+
model = create_model(self.parameters, checkpoint_path = checkpoint_path)
|
|
764
|
+
for i in range(self.epochs):
|
|
765
|
+
...
|
|
766
|
+
```
|
|
396
767
|
|
|
397
768
|
|
|
398
769
|
Parameters
|
|
399
770
|
----------
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
used.
|
|
411
|
-
image : str, optional, default None
|
|
412
|
-
Docker image to use when launching on Kubernetes. If not specified, and
|
|
413
|
-
METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
|
|
414
|
-
not, a default Docker image mapping to the current version of Python is used.
|
|
415
|
-
image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
|
|
416
|
-
If given, the imagePullPolicy to be applied to the Docker image of the step.
|
|
417
|
-
image_pull_secrets: List[str], default []
|
|
418
|
-
The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
|
|
419
|
-
Kubernetes image pull secrets to use when pulling container images
|
|
420
|
-
in Kubernetes.
|
|
421
|
-
service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
|
|
422
|
-
Kubernetes service account to use when launching pod in Kubernetes.
|
|
423
|
-
secrets : List[str], optional, default None
|
|
424
|
-
Kubernetes secrets to use when launching pod in Kubernetes. These
|
|
425
|
-
secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
|
|
426
|
-
in Metaflow configuration.
|
|
427
|
-
node_selector: Union[Dict[str,str], str], optional, default None
|
|
428
|
-
Kubernetes node selector(s) to apply to the pod running the task.
|
|
429
|
-
Can be passed in as a comma separated string of values e.g.
|
|
430
|
-
'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
|
|
431
|
-
{'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
|
|
432
|
-
namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
|
|
433
|
-
Kubernetes namespace to use when launching pod in Kubernetes.
|
|
434
|
-
gpu : int, optional, default None
|
|
435
|
-
Number of GPUs required for this step. A value of zero implies that
|
|
436
|
-
the scheduled node should not have GPUs.
|
|
437
|
-
gpu_vendor : str, default KUBERNETES_GPU_VENDOR
|
|
438
|
-
The vendor of the GPUs to be used for this step.
|
|
439
|
-
tolerations : List[Dict[str,str]], default []
|
|
440
|
-
The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
|
|
441
|
-
Kubernetes tolerations to use when launching pod in Kubernetes.
|
|
442
|
-
labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
|
|
443
|
-
Kubernetes labels to use when launching pod in Kubernetes.
|
|
444
|
-
annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
|
|
445
|
-
Kubernetes annotations to use when launching pod in Kubernetes.
|
|
446
|
-
use_tmpfs : bool, default False
|
|
447
|
-
This enables an explicit tmpfs mount for this step.
|
|
448
|
-
tmpfs_tempdir : bool, default True
|
|
449 | - Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
450 | - tmpfs_size : int, optional, default: None
451 | -     The value for the size (in MiB) of the tmpfs mount for this step.
452 | -     This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
453 | -     memory allocated for this step.
454 | - tmpfs_path : str, optional, default /metaflow_temp
455 | -     Path to tmpfs mount for this step.
456 | - persistent_volume_claims : Dict[str, str], optional, default None
457 | -     A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
458 | -     volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
459 | - shared_memory: int, optional
460 | -     Shared memory size (in MiB) required for this step
461 | - port: int, optional
462 | -     Port number to specify in the Kubernetes job object
463 | - compute_pool : str, optional, default None
464 | -     Compute pool to be used for this step.
465 | -     If not specified, any accessible compute pool within the perimeter is used.
466 | - hostname_resolution_timeout: int, default 10 * 60
467 | -     Timeout in seconds for the worker tasks in the gang scheduled cluster to resolve the hostname of the control task.
468 | -     Only applicable when @parallel is used.
469 | - qos: str, default: Burstable
470 | -     Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
771 | + load_policy : str, default: "fresh"
772 | +     The policy for loading the checkpoint. The following policies are supported:
773 | +     - "eager": Loads the latest available checkpoint within the namespace.
774 | +       With this mode, the latest checkpoint written by any previous task (possibly even from a different run) of the step
775 | +       will be loaded at the start of the task.
776 | +     - "none": Do not load any checkpoint
777 | +     - "fresh": Loads the latest checkpoint created within the running Task.
778 | +       This mode helps load checkpoints across various retry attempts of the same task.
779 | +       With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
780 | +       created within the task will be loaded when the task retries execution on failure.
471 781 |
472 | -
473 | -
474 | -     - privileged: bool, optional, default None
475 | -     - allow_privilege_escalation: bool, optional, default None
476 | -     - run_as_user: int, optional, default None
477 | -     - run_as_group: int, optional, default None
478 | -     - run_as_non_root: bool, optional, default None
782 | + temp_dir_root : str, default: None
783 | +     The root directory under which `current.checkpoint.directory` will be created.
479 784 |   """
480 785 |   ...
481 786 |
482 787 | @typing.overload
483 | - def
788 | + def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
789 | +     ...
790 | +
791 | + @typing.overload
792 | + def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
793 | +     ...
794 | +
795 | + def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
484 796 |   """
485 | -
797 | + Enables checkpointing for a step.
486 798 |
487 | -
799 | + > Examples
800 | +
801 | + - Saving Checkpoints
802 | +
803 | + ```python
804 | + @checkpoint
805 | + @step
806 | + def train(self):
807 | +     model = create_model(self.parameters, checkpoint_path=None)
808 | +     for i in range(self.epochs):
809 | +         # some training logic
810 | +         loss = model.train(self.dataset)
811 | +         if i % 10 == 0:
812 | +             model.save(
813 | +                 current.checkpoint.directory,
814 | +             )
815 | +             # saves the contents of the `current.checkpoint.directory` as a checkpoint
816 | +             # and returns a reference dictionary to the checkpoint saved in the datastore
817 | +             self.latest_checkpoint = current.checkpoint.save(
818 | +                 name="epoch_checkpoint",
819 | +                 metadata={
820 | +                     "epoch": i,
821 | +                     "loss": loss,
822 | +                 }
823 | +             )
824 | + ```
825 | +
826 | + - Using Loaded Checkpoints
827 | +
828 | + ```python
829 | + @retry(times=3)
830 | + @checkpoint
831 | + @step
832 | + def train(self):
833 | +     # Assume that the task has restarted and the previous attempt of the task
834 | +     # saved a checkpoint
835 | +     checkpoint_path = None
836 | +     if current.checkpoint.is_loaded:  # Check if a checkpoint is loaded
837 | +         print("Loaded checkpoint from the previous attempt")
838 | +         checkpoint_path = current.checkpoint.directory
839 | +
840 | +     model = create_model(self.parameters, checkpoint_path=checkpoint_path)
841 | +     for i in range(self.epochs):
842 | +         ...
843 | + ```
488 844 |
489 845 |
490 846 |   Parameters
491 847 |   ----------
492-499 | - (8 blank lines)
848 | + load_policy : str, default: "fresh"
849 | +     The policy for loading the checkpoint. The following policies are supported:
850 | +     - "eager": Loads the latest available checkpoint within the namespace.
851 | +       With this mode, the latest checkpoint written by any previous task (possibly even from a different run) of the step
852 | +       will be loaded at the start of the task.
853 | +     - "none": Do not load any checkpoint
854 | +     - "fresh": Loads the latest checkpoint created within the running Task.
855 | +       This mode helps load checkpoints across various retry attempts of the same task.
856 | +       With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
857 | +       created within the task will be loaded when the task retries execution on failure.
858 | +
859 | + temp_dir_root : str, default: None
860 | +     The root directory under which `current.checkpoint.directory` will be created.
500 861 |   """
501 862 |   ...
502 863 |
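To make the `load_policy` modes above concrete, here is a minimal sketch of a flow that opts into the "eager" policy. The flow name, file contents, and the assumption that `checkpoint` is importable from the top-level `metaflow` package (as these stubs suggest) are illustrative only.

```python
import os

from metaflow import FlowSpec, step, current, checkpoint  # assumes the stubbed top-level export

class EagerCheckpointFlow(FlowSpec):

    # "eager" loads the latest checkpoint written by any previous task of this
    # step within the namespace, possibly from a different run.
    @checkpoint(load_policy="eager")
    @step
    def start(self):
        state_file = os.path.join(current.checkpoint.directory, "state.txt")
        if current.checkpoint.is_loaded:
            with open(state_file) as f:
                print("resuming from:", f.read())
        with open(state_file, "w") as f:
            f.write("epoch-10")
        # Persist the directory contents; the returned reference is stored as an artifact.
        self.state_checkpoint = current.checkpoint.save(name="state_checkpoint")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EagerCheckpointFlow()
```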
503 864 | @typing.overload
504 | - def
865 | + def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
866 | + """
867 | + Specifies a timeout for your step.
868 | +
869 | + This decorator is useful if this step may hang indefinitely.
870 | +
871 | + This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
872 | + A timeout is considered to be an exception thrown by the step. It will cause the step to be
873 | + retried if needed and the exception will be caught by the `@catch` decorator, if present.
874 | +
875 | + Note that all the values specified in parameters are added together so if you specify
876 | + 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
877 | +
878 | +
879 | + Parameters
880 | + ----------
881 | + seconds : int, default 0
882 | +     Number of seconds to wait prior to timing out.
883 | + minutes : int, default 0
884 | +     Number of minutes to wait prior to timing out.
885 | + hours : int, default 0
886 | +     Number of hours to wait prior to timing out.
887 | + """
505 888 |   ...
506 889 |
507 890 | @typing.overload
508 | - def
891 | + def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
509 892 |   ...
510 893 |
511 | -
894 | + @typing.overload
895 | + def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
896 | +     ...
897 | +
898 | + def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
512 899 |   """
513 | -
900 | + Specifies a timeout for your step.
514 901 |
515 | -
902 | + This decorator is useful if this step may hang indefinitely.
903 | +
904 | + This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
905 | + A timeout is considered to be an exception thrown by the step. It will cause the step to be
906 | + retried if needed and the exception will be caught by the `@catch` decorator, if present.
907 | +
908 | + Note that all the values specified in parameters are added together so if you specify
909 | + 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
516 910 |
517 911 |
518 912 |   Parameters
519 913 |   ----------
520-525 | - (6 blank lines)
526 | - timeout : int, default 45
527 | -     Interrupt reporting if it takes more than this many seconds.
914 | + seconds : int, default 0
915 | +     Number of seconds to wait prior to timing out.
916 | + minutes : int, default 0
917 | +     Number of minutes to wait prior to timing out.
918 | + hours : int, default 0
919 | +     Number of hours to wait prior to timing out.
528 920 |   """
529 921 |   ...
530 922 |
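Because the duration arguments are additive, stacking `@timeout` with `@retry` and `@catch` behaves exactly as the docstring describes. A minimal sketch, with an illustrative step body and flow name:

```python
import time

from metaflow import FlowSpec, step, timeout, retry, catch

class TimeoutDemoFlow(FlowSpec):

    # Effective per-attempt budget: 1 hour + 60 seconds = 1 hour and 1 minute.
    @catch(var="timed_out")    # a timeout is raised as an exception, so @catch can record it
    @retry(times=2)            # and @retry re-runs the step before @catch finally absorbs it
    @timeout(hours=1, seconds=60)
    @step
    def start(self):
        time.sleep(1)          # stand-in for work that could hang indefinitely
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "timed_out", None) is not None:
            print("start exhausted its retries:", self.timed_out)

if __name__ == "__main__":
    TimeoutDemoFlow()
```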
531 923 | @typing.overload
532 | - def
924 | + def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
533 925 |   """
534 | -
926 | + A simple decorator that demonstrates using CardDecoratorInjector
927 | + to inject a card and render simple markdown content.
535 928 |   """
536 929 |   ...
537 930 |
538 931 | @typing.overload
539 | - def
932 | + def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
540 933 |   ...
541 934 |
542 | - def
935 | + def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
543 936 |   """
544 | -
937 | + A simple decorator that demonstrates using CardDecoratorInjector
938 | + to inject a card and render simple markdown content.
545 939 |   """
546 940 |   ...
547 941 |
548 942 | @typing.overload
549 | - def
943 | + def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
550 944 |   """
551 | - Specifies
945 | + Specifies the resources needed when executing this step.
552 946 |
553-556 | - (4 blank lines)
947 | + Use `@resources` to specify the resource requirements
948 | + independently of the specific compute layer (`@batch`, `@kubernetes`).
949 | +
950 | + You can choose the compute layer on the command line by executing e.g.
951 | + ```
952 | + python myflow.py run --with batch
953 | + ```
954 | + or
955 | + ```
956 | + python myflow.py run --with kubernetes
957 | + ```
958 | + which executes the flow on the desired system using the
959 | + requirements specified in `@resources`.
557 960 |
558 961 |
559 962 |   Parameters
560 963 |   ----------
561-566 | - (6 blank lines)
964 | + cpu : int, default 1
965 | +     Number of CPUs required for this step.
966 | + gpu : int, optional, default None
967 | +     Number of GPUs required for this step.
968 | + disk : int, optional, default None
969 | +     Disk size (in MB) required for this step. Only applies on Kubernetes.
970 | + memory : int, default 4096
971 | +     Memory size (in MB) required for this step.
972 | + shared_memory : int, optional, default None
973 | +     The value for the size (in MiB) of the /dev/shm volume for this step.
974 | +     This parameter maps to the `--shm-size` option in Docker.
567 975 |   """
568 976 |   ...
569 977 |
570 978 | @typing.overload
571 | - def
979 | + def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
572 980 |   ...
573 981 |
574 982 | @typing.overload
575 | - def
983 | + def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
576 984 |   ...
577 985 |
578 | - def
986 | + def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
579 987 |   """
580 | - Specifies
988 | + Specifies the resources needed when executing this step.
581 989 |
582-585 | - (4 blank lines)
990 | + Use `@resources` to specify the resource requirements
991 | + independently of the specific compute layer (`@batch`, `@kubernetes`).
992 | +
993 | + You can choose the compute layer on the command line by executing e.g.
994 | + ```
995 | + python myflow.py run --with batch
996 | + ```
997 | + or
998 | + ```
999 | + python myflow.py run --with kubernetes
1000 | + ```
1001 | + which executes the flow on the desired system using the
1002 | + requirements specified in `@resources`.
586 1003 |
587 1004 |
588 1005 |   Parameters
589 1006 |   ----------
590-595 | - (6 blank lines)
1007 | + cpu : int, default 1
1008 | +     Number of CPUs required for this step.
1009 | + gpu : int, optional, default None
1010 | +     Number of GPUs required for this step.
1011 | + disk : int, optional, default None
1012 | +     Disk size (in MB) required for this step. Only applies on Kubernetes.
1013 | + memory : int, default 4096
1014 | +     Memory size (in MB) required for this step.
1015 | + shared_memory : int, optional, default None
1016 | +     The value for the size (in MiB) of the /dev/shm volume for this step.
1017 | +     This parameter maps to the `--shm-size` option in Docker.
596 1018 |   """
597 1019 |   ...
598 1020 |
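A short sketch of the pattern this docstring describes: declare requirements once with `@resources`, then choose the compute layer at run time. The numbers and names are illustrative only.

```python
from metaflow import FlowSpec, step, resources

class ResourcesDemoFlow(FlowSpec):

    @resources(cpu=4, memory=16000, shared_memory=1024)
    @step
    def start(self):
        # These requirements are scheduler-agnostic; they only take effect
        # when a compute layer is selected on the command line, e.g.
        #   python resources_demo.py run --with kubernetes
        #   python resources_demo.py run --with batch
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ResourcesDemoFlow()
```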
599 1021 | @typing.overload
600 | - def
1022 | + def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
601 1023 |   """
602 | - Specifies the
1024 | + Specifies that the step will succeed under all circumstances.
603 1025 |
604-607 | - (4 blank lines)
1026 | + The decorator will create an optional artifact, specified by `var`, which
1027 | + contains the exception raised. You can use it to detect the presence
1028 | + of errors, indicating that all happy-path artifacts produced by the step
1029 | + are missing.
608 1030 |
609 1031 |
610 1032 |   Parameters
611 1033 |   ----------
612-617 | - (6 blank lines)
1034 | + var : str, optional, default None
1035 | +     Name of the artifact in which to store the caught exception.
1036 | +     If not specified, the exception is not stored.
1037 | + print_exception : bool, default True
1038 | +     Determines whether or not the exception is printed to
1039 | +     stdout when caught.
618 1040 |   """
619 1041 |   ...
620 1042 |
621 1043 | @typing.overload
622 | - def
1044 | + def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
623 1045 |   ...
624 1046 |
625 1047 | @typing.overload
626 | - def
627 | -     ...
628 | -
629 | - def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
630 | - """
631 | - Specifies the PyPI packages for the step.
632 | -
633 | - Information in this decorator will augment any
634 | - attributes set in the `@pypi_base` flow-level decorator. Hence,
635 | - you can use `@pypi_base` to set packages required by all
636 | - steps and use `@pypi` to specify step-specific overrides.
637 | -
638 | -
639 | - Parameters
640 | - ----------
641 | - packages : Dict[str, str], default: {}
642 | -     Packages to use for this step. The key is the name of the package
643 | -     and the value is the version to use.
644 | - python : str, optional, default: None
645 | -     Version of Python to use, e.g. '3.7.4'. A default value of None implies
646 | -     that the version used will correspond to the version of the Python interpreter used to start the run.
647 | - """
1048 | + def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
648 1049 |   ...
649 1050 |
650 | - def
1051 | + def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
651 1052 |   """
652 | -
653 | -
654 | - > Examples
655 | -
656 | - **Usage: creating references to models from the Hugging Face Hub that may be loaded in downstream steps**
657 | - ```python
658 | - @huggingface_hub
659 | - @step
660 | - def pull_model_from_huggingface(self):
661 | -     # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
662 | -     # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
663 | -     # with the same `repo_id` in the backend storage, it will not download the model again. The return
664 | -     # value of the function is a reference to the model in the backend storage.
665 | -     # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
666 | -
667 | -     self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
668 | -     self.llama_model = current.huggingface_hub.snapshot_download(
669 | -         repo_id=self.model_id,
670 | -         allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
671 | -     )
672 | -     self.next(self.train)
673 | - ```
674 | -
675 | - **Usage: loading models directly from the Hugging Face Hub or from cache (from Metaflow's datastore)**
676 | - ```python
677 | - @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
678 | - @step
679 | - def pull_model_from_huggingface(self):
680 | -     path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
681 | - ```
682 | -
683 | - ```python
684 | - @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
685 | - @step
686 | - def finetune_model(self):
687 | -     path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
688 | -     # path_to_model will be /my-directory
689 | - ```
1053 | + Specifies that the step will succeed under all circumstances.
690 1054 |
691-694 | - (4 blank lines)
695 | -     {
696 | -         "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
697 | -     },
698 | -     {
699 | -         "repo_id": "myorg/mistral-lora",
700 | -         "repo_type": "model",
701 | -     },
702 | - ])
703 | - @step
704 | - def finetune_model(self):
705 | -     path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
706 | -     # path_to_model will be /my-directory
707 | - ```
1055 | + The decorator will create an optional artifact, specified by `var`, which
1056 | + contains the exception raised. You can use it to detect the presence
1057 | + of errors, indicating that all happy-path artifacts produced by the step
1058 | + are missing.
708 1059 |
709 1060 |
710 1061 |   Parameters
711 1062 |   ----------
712-717 | - (6 blank lines)
718 | - - `checkpoint` (default): All repos are stored like objects saved by `@checkpoint`.
719 | -   i.e., the cached path is derived from the namespace, flow, step, and Metaflow foreach iteration.
720 | -   Any repo downloaded under this scope will only be retrieved from the cache when the step runs under the same namespace in the same flow (at the same foreach index).
721 | -
722 | - - `flow`: All repos are cached under the flow, regardless of namespace.
723 | -   i.e., the cached path is derived solely from the flow name.
724 | -   When to use this mode:
725 | -     - Multiple users are executing the same flow and want shared access to the repos cached by the decorator.
726 | -     - Multiple versions of a flow are deployed, all needing access to the same repos cached by the decorator.
727 | -
728 | - - `global`: All repos are cached under a globally static path.
729 | -   i.e., the base path of the cache is static and all repos are stored under it.
730 | -   When to use this mode:
731 | -     - All repos from the Hugging Face Hub need to be shared by users across all flow executions.
732 | -
733 | - Each caching scope comes with its own trade-offs:
734 | - - `checkpoint`:
735 | -     - Has explicit control over when caches are populated (controlled by the same flow that has the `@huggingface_hub` decorator) but ends up hitting the Hugging Face Hub more often if there are many users/namespaces/steps.
736 | -     - Since objects are written on a `namespace/flow/step` basis, the blast radius of a bad checkpoint is limited to a particular flow in a namespace.
737 | - - `flow`:
738 | -     - Has less control over when caches are populated (can be written by any execution instance of a flow from any namespace) but results in more cache hits.
739 | -     - The blast radius of a bad checkpoint is limited to all runs of a particular flow.
740 | -     - It doesn't promote cache reuse across flows.
741 | - - `global`:
742 | -     - Has no control over when caches are populated (can be written by any flow execution) but has the highest cache hit rate.
743 | -     - It promotes cache reuse across flows.
744 | -     - The blast radius of a bad checkpoint spans every flow that could be using a particular repo.
745 | -
746 | - load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
747 | -     The list of repos (models/datasets) to load.
748 | -
749 | -     Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
750 | -
751 | -     - If repo (model/dataset) is not found in the datastore:
752 | -         - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
753 | -         - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
754 | -         - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
755 | -
756 | -     - If repo is found in the datastore:
757 | -         - Loads it directly from datastore to local path (can be temporary directory or specified path)
1063 | + var : str, optional, default None
1064 | +     Name of the artifact in which to store the caught exception.
1065 | +     If not specified, the exception is not stored.
1066 | + print_exception : bool, default True
1067 | +     Determines whether or not the exception is printed to
1068 | +     stdout when caught.
758 1069 |   """
759 1070 |   ...
760 1071 |
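A sketch of the `var` mechanics described above, with illustrative names: the caught exception lands in the named artifact, and a downstream step can use its presence to detect that the happy-path artifacts are missing.

```python
from metaflow import FlowSpec, step, catch

class CatchDemoFlow(FlowSpec):

    @catch(var="start_failure", print_exception=True)
    @step
    def start(self):
        raise ValueError("boom")  # @catch absorbs this; the step is still marked successful
        self.result = 42          # never assigned, so downstream code must not rely on it
        self.next(self.end)

    @step
    def end(self):
        failure = getattr(self, "start_failure", None)
        if failure is not None:
            print("start failed, falling back:", failure)
        else:
            print("result:", self.result)

if __name__ == "__main__":
    CatchDemoFlow()
```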
@@ -936,137 +1247,92 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
936 1247 |   """
937 1248 |   ...
938 1249 |
939 | - def
940 | - """
941 | - S3 Proxy decorator for routing S3 requests through a local proxy service.
942 | -
943 | -
944 | - Parameters
945 | - ----------
946 | - integration_name : str, optional
947 | -     Name of the S3 proxy integration. If not specified, will use the only
948 | -     available S3 proxy integration in the namespace (fails if multiple exist).
949 | - write_mode : str, optional
950 | -     The desired behavior during write operations to target (origin) S3 bucket.
951 | -     Allowed options are:
952 | -     "origin-and-cache" -> write to both the target S3 bucket and local object
953 | -     storage
954 | -     "origin" -> only write to the target S3 bucket
955 | -     "cache" -> only write to the object storage service used for caching
956 | - debug : bool, optional
957 | -     Enable debug logging for proxy operations.
958 | - """
959 | - ...
960 | -
961 | - @typing.overload
962 | - def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
963 | - """
964 | - Decorator prototype for all step decorators. This function gets specialized
965 | - and imported for all decorator types by _import_plugin_decorators().
966 | - """
967 | - ...
968 | -
969 | - @typing.overload
970 | - def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
971 | -     ...
972 | -
973 | - def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
974 | - """
975 | - Decorator prototype for all step decorators. This function gets specialized
976 | - and imported for all decorator types by _import_plugin_decorators().
977 | - """
978 | - ...
979 | -
980 | - @typing.overload
981 | - def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1250 | + def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
982 1251 |   """
983 | - Specifies
984 | -
985 | - Use `@resources` to specify the resource requirements
986 | - independently of the specific compute layer (`@batch`, `@kubernetes`).
987 | -
988 | - You can choose the compute layer on the command line by executing e.g.
989 | - ```
990 | - python myflow.py run --with batch
991 | - ```
992 | - or
993 | - ```
994 | - python myflow.py run --with kubernetes
995 | - ```
996 | - which executes the flow on the desired system using the
997 | - requirements specified in `@resources`.
1252 | + Specifies that this step should execute on Kubernetes.
998 1253 |
999 1254 |
1000 1255 |   Parameters
1001 1256 |   ----------
1002 1257 |   cpu : int, default 1
1003 | -     Number of CPUs required for this step.
1004 | -
1005 | -     Number of GPUs required for this step.
1006 | - disk : int, optional, default None
1007 | -     Disk size (in MB) required for this step. Only applies on Kubernetes.
1258 | +     Number of CPUs required for this step. If `@resources` is
1259 | +     also present, the maximum value from all decorators is used.
1008 1260 |   memory : int, default 4096
1009 | -     Memory size (in MB) required for this step.
1010-1038 | - (29 blank lines)
1039 | - which executes the flow on the desired system using the
1040 | - requirements specified in `@resources`.
1041 | -
1042 | -
1043 | - Parameters
1044 | - ----------
1045 | - cpu : int, default 1
1046 | -     Number of CPUs required for this step.
1261 | +     Memory size (in MB) required for this step. If
1262 | +     `@resources` is also present, the maximum value from all decorators is
1263 | +     used.
1264 | + disk : int, default 10240
1265 | +     Disk size (in MB) required for this step. If
1266 | +     `@resources` is also present, the maximum value from all decorators is
1267 | +     used.
1268 | + image : str, optional, default None
1269 | +     Docker image to use when launching on Kubernetes. If not specified, and
1270 | +     METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1271 | +     not, a default Docker image mapping to the current version of Python is used.
1272 | + image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1273 | +     If given, the imagePullPolicy to be applied to the Docker image of the step.
1274 | + image_pull_secrets: List[str], default []
1275 | +     The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
1276 | +     Kubernetes image pull secrets to use when pulling container images
1277 | +     in Kubernetes.
1278 | + service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1279 | +     Kubernetes service account to use when launching pod in Kubernetes.
1280 | + secrets : List[str], optional, default None
1281 | +     Kubernetes secrets to use when launching pod in Kubernetes. These
1282 | +     secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1283 | +     in Metaflow configuration.
1284 | + node_selector: Union[Dict[str,str], str], optional, default None
1285 | +     Kubernetes node selector(s) to apply to the pod running the task.
1286 | +     Can be passed in as a comma separated string of values e.g.
1287 | +     'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
1288 | +     {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
1289 | + namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1290 | +     Kubernetes namespace to use when launching pod in Kubernetes.
1047 1291 |   gpu : int, optional, default None
1048 | -     Number of GPUs required for this step.
1049-1062 | - (14 blank lines)
1292 | +     Number of GPUs required for this step. A value of zero implies that
1293 | +     the scheduled node should not have GPUs.
1294 | + gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1295 | +     The vendor of the GPUs to be used for this step.
1296 | + tolerations : List[Dict[str,str]], default []
1297 | +     The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1298 | +     Kubernetes tolerations to use when launching pod in Kubernetes.
1299 | + labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
1300 | +     Kubernetes labels to use when launching pod in Kubernetes.
1301 | + annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
1302 | +     Kubernetes annotations to use when launching pod in Kubernetes.
1303 | + use_tmpfs : bool, default False
1304 | +     This enables an explicit tmpfs mount for this step.
1305 | + tmpfs_tempdir : bool, default True
1306 | +     Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1307 | + tmpfs_size : int, optional, default: None
1308 | +     The value for the size (in MiB) of the tmpfs mount for this step.
1309 | +     This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1310 | +     memory allocated for this step.
1311 | + tmpfs_path : str, optional, default /metaflow_temp
1312 | +     Path to tmpfs mount for this step.
1313 | + persistent_volume_claims : Dict[str, str], optional, default None
1314 | +     A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1315 | +     volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1316 | + shared_memory: int, optional
1317 | +     Shared memory size (in MiB) required for this step
1318 | + port: int, optional
1319 | +     Port number to specify in the Kubernetes job object
1320 | + compute_pool : str, optional, default None
1321 | +     Compute pool to be used for this step.
1322 | +     If not specified, any accessible compute pool within the perimeter is used.
1323 | + hostname_resolution_timeout: int, default 10 * 60
1324 | +     Timeout in seconds for the worker tasks in the gang scheduled cluster to resolve the hostname of the control task.
1325 | +     Only applicable when @parallel is used.
1326 | + qos: str, default: Burstable
1327 | +     Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1063 1328 |
1064-1069 | - (6 blank lines)
1329 | + security_context: Dict[str, Any], optional, default None
1330 | +     Container security context. Applies to the task container. Allows the following keys:
1331 | +     - privileged: bool, optional, default None
1332 | +     - allow_privilege_escalation: bool, optional, default None
1333 | +     - run_as_user: int, optional, default None
1334 | +     - run_as_group: int, optional, default None
1335 | +     - run_as_non_root: bool, optional, default None
1070 1336 |   """
1071 1337 |   ...
1072 1338 |
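Pulling several of these parameters together, a hedged configuration sketch follows; the resource values, PVC name, and security settings are illustrative, not recommendations.

```python
from metaflow import FlowSpec, step, kubernetes

class K8sDemoFlow(FlowSpec):

    @kubernetes(
        cpu=2,
        memory=8192,                                  # MB; the max of this and any @resources value wins
        disk=20480,
        node_selector={"kubernetes.io/arch": "amd64"},
        persistent_volume_claims={"training-data-pvc": "/mnt/data"},
        use_tmpfs=True,
        tmpfs_size=2048,                              # MiB, mounted at /metaflow_temp by default
        qos="Burstable",
        security_context={"run_as_non_root": True, "run_as_user": 1000},
    )
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sDemoFlow()
```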
@@ -1089,389 +1355,194 @@ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, s
1089 1355 |   libraries : Dict[str, str], default {}
1090 1356 |       Supported for backward compatibility. When used with packages, packages will take precedence.
1091 1357 |   python : str, optional, default None
1092 | -     Version of Python to use, e.g. '3.7.4'. A default value of None implies
1093 | -     that the version used will correspond to the version of the Python interpreter used to start the run.
1094 | - disabled : bool, default False
1095 | -     If set to True, disables @conda.
1096 | - """
1097 | - ...
1098 | -
1099 | - @typing.overload
1100 | - def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1101 | -     ...
1102 | -
1103 | - @typing.overload
1104 | - def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1105 | -     ...
1106 | -
1107 | - def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1108 | - """
1109 | - Specifies the Conda environment for the step.
1110 | -
1111 | - Information in this decorator will augment any
1112 | - attributes set in the `@conda_base` flow-level decorator. Hence,
1113 | - you can use `@conda_base` to set packages required by all
1114 | - steps and use `@conda` to specify step-specific overrides.
1115 | -
1116 | -
1117 | - Parameters
1118 | - ----------
1119 | - packages : Dict[str, str], default {}
1120 | -     Packages to use for this step. The key is the name of the package
1121 | -     and the value is the version to use.
1122 | - libraries : Dict[str, str], default {}
1123 | -     Supported for backward compatibility. When used with packages, packages will take precedence.
1124 | - python : str, optional, default None
1125 | -     Version of Python to use, e.g. '3.7.4'. A default value of None implies
1126 | -     that the version used will correspond to the version of the Python interpreter used to start the run.
1127 | - disabled : bool, default False
1128 | -     If set to True, disables @conda.
1129 | - """
1130 | - ...
1131 | -
1132 | - @typing.overload
1133 | - def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1134 | - """
1135 | - Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1136 | - It exists to make it easier for users to know that this decorator should only be used with
1137 | - a Neo Cloud like Nebius.
1138 | - """
1139 | - ...
1140 | -
1141 | - @typing.overload
1142 | - def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1143 | -     ...
1144 | -
1145 | - def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1146 | - """
1147 | - Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1148 | - It exists to make it easier for users to know that this decorator should only be used with
1149 | - a Neo Cloud like Nebius.
1150 | - """
1151 | - ...
1152 | -
1153 | - @typing.overload
1154 | - def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1155 | - """
1156 | - Enables checkpointing for a step.
1157 | -
1158 | - > Examples
1159 | -
1160 | - - Saving Checkpoints
1161 | -
1162 | - ```python
1163 | - @checkpoint
1164 | - @step
1165 | - def train(self):
1166 | -     model = create_model(self.parameters, checkpoint_path=None)
1167 | -     for i in range(self.epochs):
1168 | -         # some training logic
1169 | -         loss = model.train(self.dataset)
1170 | -         if i % 10 == 0:
1171 | -             model.save(
1172 | -                 current.checkpoint.directory,
1173 | -             )
1174 | -             # saves the contents of the `current.checkpoint.directory` as a checkpoint
1175 | -             # and returns a reference dictionary to the checkpoint saved in the datastore
1176 | -             self.latest_checkpoint = current.checkpoint.save(
1177 | -                 name="epoch_checkpoint",
1178 | -                 metadata={
1179 | -                     "epoch": i,
1180 | -                     "loss": loss,
1181 | -                 }
1182 | -             )
1183 | - ```
1184 | -
1185 | - - Using Loaded Checkpoints
1186 | -
1187 | - ```python
1188 | - @retry(times=3)
1189 | - @checkpoint
1190 | - @step
1191 | - def train(self):
1192 | -     # Assume that the task has restarted and the previous attempt of the task
1193 | -     # saved a checkpoint
1194 | -     checkpoint_path = None
1195 | -     if current.checkpoint.is_loaded:  # Check if a checkpoint is loaded
1196 | -         print("Loaded checkpoint from the previous attempt")
1197 | -         checkpoint_path = current.checkpoint.directory
1198 | -
1199 | -     model = create_model(self.parameters, checkpoint_path=checkpoint_path)
1200 | -     for i in range(self.epochs):
1201 | -         ...
1202 | - ```
1203 | -
1204 | -
1205 | - Parameters
1206 | - ----------
1207 | - load_policy : str, default: "fresh"
1208 | -     The policy for loading the checkpoint. The following policies are supported:
1209 | -     - "eager": Loads the latest available checkpoint within the namespace.
1210 | -       With this mode, the latest checkpoint written by any previous task (possibly even from a different run) of the step
1211 | -       will be loaded at the start of the task.
1212 | -     - "none": Do not load any checkpoint
1213 | -     - "fresh": Loads the latest checkpoint created within the running Task.
1214 | -       This mode helps load checkpoints across various retry attempts of the same task.
1215 | -       With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1216 | -       created within the task will be loaded when the task retries execution on failure.
1217 | -
1218 | - temp_dir_root : str, default: None
1219 | -     The root directory under which `current.checkpoint.directory` will be created.
1220 | - """
1221 | - ...
1222 | -
1223 | - @typing.overload
1224 | - def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1225 | -     ...
1226 | -
1227 | - @typing.overload
1228 | - def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1229 | -     ...
1230 | -
1231 | - def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
1232 | - """
1233 | - Enables checkpointing for a step.
1234 | -
1235 | - > Examples
1236 | -
1237 | - - Saving Checkpoints
1238 | -
1239 | - ```python
1240 | - @checkpoint
1241 | - @step
1242 | - def train(self):
1243 | -     model = create_model(self.parameters, checkpoint_path=None)
1244 | -     for i in range(self.epochs):
1245 | -         # some training logic
1246 | -         loss = model.train(self.dataset)
1247 | -         if i % 10 == 0:
1248 | -             model.save(
1249 | -                 current.checkpoint.directory,
1250 | -             )
1251 | -             # saves the contents of the `current.checkpoint.directory` as a checkpoint
1252 | -             # and returns a reference dictionary to the checkpoint saved in the datastore
1253 | -             self.latest_checkpoint = current.checkpoint.save(
1254 | -                 name="epoch_checkpoint",
1255 | -                 metadata={
1256 | -                     "epoch": i,
1257 | -                     "loss": loss,
1258 | -                 }
1259 | -             )
1260 | - ```
1261 | -
1262 | - - Using Loaded Checkpoints
1263 | -
1264 | - ```python
1265 | - @retry(times=3)
1266 | - @checkpoint
1267 | - @step
1268 | - def train(self):
1269 | -     # Assume that the task has restarted and the previous attempt of the task
1270 | -     # saved a checkpoint
1271 | -     checkpoint_path = None
1272 | -     if current.checkpoint.is_loaded:  # Check if a checkpoint is loaded
1273 | -         print("Loaded checkpoint from the previous attempt")
1274 | -         checkpoint_path = current.checkpoint.directory
1275 | -
1276 | -     model = create_model(self.parameters, checkpoint_path=checkpoint_path)
1277 | -     for i in range(self.epochs):
1278 | -         ...
1279 | - ```
1280 | -
1281 | -
1282 | - Parameters
1283 | - ----------
1284 | - load_policy : str, default: "fresh"
1285 | -     The policy for loading the checkpoint. The following policies are supported:
1286 | -     - "eager": Loads the latest available checkpoint within the namespace.
1287 | -       With this mode, the latest checkpoint written by any previous task (possibly even from a different run) of the step
1288 | -       will be loaded at the start of the task.
1289 | -     - "none": Do not load any checkpoint
1290 | -     - "fresh": Loads the latest checkpoint created within the running Task.
1291 | -       This mode helps load checkpoints across various retry attempts of the same task.
1292 | -       With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1293 | -       created within the task will be loaded when the task retries execution on failure.
1294 | -
1295 | - temp_dir_root : str, default: None
1296 | -     The root directory under which `current.checkpoint.directory` will be created.
1297 | - """
1298 | - ...
1299 | -
1300 | - def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1301 | - """
1302 | - This decorator is used to run Ollama APIs as Metaflow task sidecars.
1303 | -
1304 | - User code call
1305 | - --------------
1306 | - @ollama(
1307 | -     models=[...],
1308 | -     ...
1309 | - )
1310 | -
1311 | - Valid backend options
1312 | - ---------------------
1313 | - - 'local': Run as a separate process on the local task machine.
1314 | - - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1315 | - - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1316 | -
1317 | - Valid model options
1318 | - -------------------
1319 | - Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1320 | -
1321 | -
1322 | - Parameters
1323 | - ----------
1324 | - models: list[str]
1325 | -     List of Ollama containers running models in sidecars.
1326 | - backend: str
1327 | -     Determines where and how to run the Ollama process.
1328 | - force_pull: bool
1329 | -     Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1330 | - cache_update_policy: str
1331 | -     Cache update policy: "auto", "force", or "never".
1332 | - force_cache_update: bool
1333 | -     Simple override for "force" cache update policy.
1334 | - debug: bool
1335 | -     Whether to turn on verbose debugging logs.
1336 | - circuit_breaker_config: dict
1337 | -     Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1338 | - timeout_config: dict
1339 | -     Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1340 | - """
1341 | - ...
1342 | -
1343 | - @typing.overload
1344 | - def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1345 | - """
1346 | - Specifies secrets to be retrieved and injected as environment variables prior to
1347 | - the execution of a step.
1348 | -
1349 | -
1350 | - Parameters
1351 | - ----------
1352 | - sources : List[Union[str, Dict[str, Any]]], default: []
1353 | -     List of secret specs, defining how the secrets are to be retrieved
1354 | - role : str, optional, default: None
1355 | -     Role to use for fetching secrets
1358 | +     Version of Python to use, e.g. '3.7.4'. A default value of None implies
1359 | +     that the version used will correspond to the version of the Python interpreter used to start the run.
1360 | + disabled : bool, default False
1361 | +     If set to True, disables @conda.
1356 1362 |   """
1357 1363 |   ...
1358 1364 |
1359 1365 | @typing.overload
1360 | - def
1366 | + def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1361 1367 |   ...
1362 1368 |
1363 1369 | @typing.overload
1364 | - def
1370 | + def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1365 1371 |   ...
1366 1372 |
1367 | - def
1373 | + def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1368 1374 |   """
1369 | - Specifies
1370 | -
1375 | + Specifies the Conda environment for the step.
1376 | +
1377 | + Information in this decorator will augment any
1378 | + attributes set in the `@conda_base` flow-level decorator. Hence,
1379 | + you can use `@conda_base` to set packages required by all
1380 | + steps and use `@conda` to specify step-specific overrides.
1371 1381 |
1372 1382 |
1373 1383 |   Parameters
1374 1384 |   ----------
1375-1378 | - (4 blank lines)
1385 | + packages : Dict[str, str], default {}
1386 | +     Packages to use for this step. The key is the name of the package
1387 | +     and the value is the version to use.
1388 | + libraries : Dict[str, str], default {}
1389 | +     Supported for backward compatibility. When used with packages, packages will take precedence.
1390 | + python : str, optional, default None
1391 | +     Version of Python to use, e.g. '3.7.4'. A default value of None implies
1392 | +     that the version used will correspond to the version of the Python interpreter used to start the run.
1393 | + disabled : bool, default False
1394 | +     If set to True, disables @conda.
1379 1395 |   """
1380 1396 |   ...
1381 1397 |
1382 | -
1398 | + @typing.overload
1399 | + def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1383 1400 |   """
1384 | - Specifies
1401 | + Specifies the Conda environment for all steps of the flow.
1385 1402 |
1386 | -
1387 | - use
1403 | + Use `@conda_base` to set common libraries required by all
1404 | + steps and use `@conda` to specify step-specific additions.
1388 1405 |
1389 1406 |
1390 1407 |   Parameters
1391 1408 |   ----------
1392-1395 | - (4 blank lines)
1409 | + packages : Dict[str, str], default {}
1410 | +     Packages to use for this flow. The key is the name of the package
1411 | +     and the value is the version to use.
1412 | + libraries : Dict[str, str], default {}
1413 | +     Supported for backward compatibility. When used with packages, packages will take precedence.
1414 | + python : str, optional, default None
1415 | +     Version of Python to use, e.g. '3.7.4'. A default value of None implies
1416 | +     that the version used will correspond to the version of the Python interpreter used to start the run.
1417 | + disabled : bool, default False
1418 | +     If set to True, disables Conda.
1419 | + """
1420 | + ...
1421 | +
1422 | + @typing.overload
1423 | + def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1424 | +     ...
1425 | +
1426 | + def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1427 | + """
1428 | + Specifies the Conda environment for all steps of the flow.
1396 1429 |
1397 | -
1398 | -
1399 | - `user.<username>` unless `production` is set to `True`. This can
1400 | - also be set on the command line using `--branch` as a top-level option.
1401 | - It is an error to specify `branch` in the decorator and on the command line.
1430 | + Use `@conda_base` to set common libraries required by all
1431 | + steps and use `@conda` to specify step-specific additions.
1402 1432 |
1403-1406 | - (4 blank lines)
1407 | - The
1408-1413 | - (6 blank lines)
1433 | +
1434 | + Parameters
1435 | + ----------
1436 | + packages : Dict[str, str], default {}
1437 | +     Packages to use for this flow. The key is the name of the package
1438 | +     and the value is the version to use.
1439 | + libraries : Dict[str, str], default {}
1440 | +     Supported for backward compatibility. When used with packages, packages will take precedence.
1441 | + python : str, optional, default None
1442 | +     Version of Python to use, e.g. '3.7.4'. A default value of None implies
1443 | +     that the version used will correspond to the version of the Python interpreter used to start the run.
1444 | + disabled : bool, default False
1445 | +     If set to True, disables Conda.
1414 1446 |   """
1415 1447 |   ...
1416 1448 |
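The flow-level/step-level split that `@conda_base` and `@conda` describe can be sketched as follows; the package pins are illustrative only.

```python
from metaflow import FlowSpec, step, conda, conda_base

@conda_base(python="3.10.12", packages={"numpy": "1.26.4"})  # shared by every step
class CondaDemoFlow(FlowSpec):

    @step
    def start(self):
        import numpy  # resolved from the flow-level environment
        print("numpy", numpy.__version__)
        self.next(self.train)

    @conda(packages={"pandas": "2.2.2"})  # step-specific addition on top of the base
    @step
    def train(self):
        import pandas
        print("pandas", pandas.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaDemoFlow()
```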
1417
1449
|
@typing.overload
|
|
1418
|
-
def
|
|
1450
|
+
def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
|
1419
1451
|
"""
|
|
1420
|
-
Specifies the
|
|
1421
|
-
|
|
1452
|
+
Specifies the event(s) that this flow depends on.
|
|
1453
|
+
|
|
1454
|
+
```
|
|
1455
|
+
@trigger(event='foo')
|
|
1456
|
+
```
|
|
1457
|
+
or
|
|
1458
|
+
```
|
|
1459
|
+
@trigger(events=['foo', 'bar'])
|
|
1460
|
+
```
|
|
1461
|
+
|
|
1462
|
+
Additionally, you can specify the parameter mappings
|
|
1463
|
+
to map event payload to Metaflow parameters for the flow.
|
|
1464
|
+
```
|
|
1465
|
+
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```


     Parameters
     ----------
-
-
-
-
-
-
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...

 @typing.overload
-def
+def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the
-
+    Specifies the event(s) that this flow depends on.
+
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
+
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```


     Parameters
     ----------
-
-
-
-
-
-
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...

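Taken together, the rewritten `@trigger` stubs describe an event-driven deployment API: a flow can depend on one or more named events, and fields from the event payload can be mapped onto flow parameters. A minimal usage sketch, assuming `trigger`, `FlowSpec`, `Parameter`, and `step` are importable from the top-level `metaflow` package as this stub file indicates; the flow, event, and parameter names are purely illustrative:

```
from metaflow import FlowSpec, Parameter, step, trigger

# Deploy-time trigger: start the flow when event 'foo' arrives and map
# the payload field 'event_field' onto the flow parameter 'flow_param'.
@trigger(event={'name': 'foo', 'parameters': {'flow_param': 'event_field'}})
class EventDrivenFlow(FlowSpec):
    flow_param = Parameter('flow_param', default='not-set')

    @step
    def start(self):
        # On a triggered run, the parameter is filled from the event payload.
        print('flow_param =', self.flow_param)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    EventDrivenFlow()
```

Once such a flow is deployed to a production scheduler, publishing event `foo` with an `event_field` value would start a run with `flow_param` taken from the payload, per the docstring above.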
-def
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    The `@
-
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
+    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.


     Parameters
@@ -1493,111 +1564,107 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
         Name of the sensor on Airflow
     description : str
         Description of sensor in the Airflow UI
-
-    The
-
-
-
-
-
-
-
-
-
+    external_dag_id : str
+        The dag_id that contains the task you want to wait for.
+    external_task_ids : List[str]
+        The list of task_ids that you want to wait for.
+        If None (default value) the sensor waits for the DAG. (Default: None)
+    allowed_states : List[str]
+        Iterable of allowed states, (Default: ['success'])
+    failed_states : List[str]
+        Iterable of failed or dis-allowed states. (Default: None)
+    execution_delta : datetime.timedelta
+        time difference with the previous execution to look at,
+        the default is the same logical date as the current task or DAG. (Default: None)
+    check_existence: bool
+        Set to True to check if the external task exists or check if
+        the DAG to wait for exists. (Default: True)
+    """
+    ...
+
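The new signature above spells out every argument of `@airflow_external_task_sensor`. A rough sketch of a fully specified call, assuming the decorator is exported from the top-level `metaflow` package as this stub suggests; the DAG id, task id, and sensor name are placeholders, and the remaining values simply mirror the defaults documented in the docstring:

```
from metaflow import FlowSpec, step, airflow_external_task_sensor

# Block the 'start' step until task 'export_table' in the upstream Airflow
# DAG 'nightly_etl' succeeds. Only meaningful when the flow is compiled
# with `airflow create`, per the docstring above.
@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode='poke',
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name='wait_for_nightly_etl',            # placeholder sensor name
    description='Wait for the upstream ETL task',
    external_dag_id='nightly_etl',          # placeholder DAG id
    external_task_ids=['export_table'],     # placeholder task id
    allowed_states=['success'],
    failed_states=None,
    execution_delta=None,
    check_existence=True,
)
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    DownstreamFlow()
```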
+def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies what flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
+
+
+    Parameters
+    ----------
+    name : str
+        Project name. Make sure that the name is unique amongst all
+        projects that use the same production scheduler. The name may
+        contain only lowercase alphanumeric characters and underscores.
+
+    branch : Optional[str], default None
+        The branch to use. If not specified, the branch is set to
+        `user.<username>` unless `production` is set to `True`. This can
+        also be set on the command line using `--branch` as a top-level option.
+        It is an error to specify `branch` in the decorator and on the command line.
+
+    production : bool, default False
+        Whether or not the branch is the production branch. This can also be set on the
+        command line using `--production` as a top-level option. It is an error to specify
+        `production` in the decorator and on the command line.
+        The project branch name will be:
+            - if `branch` is specified:
+                - if `production` is True: `prod.<branch>`
+                - if `production` is False: `test.<branch>`
+            - if `branch` is not specified:
+                - if `production` is True: `prod`
+                - if `production` is False: `user.<username>`
     """
     ...

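Since `@project` groups flows under a shared, project-specific namespace, a short sketch may help; the project name below is illustrative and assumes `project` is importable from `metaflow` as the stub suggests:

```
from metaflow import FlowSpec, project, step

# Every flow decorated with @project(name='fraud_detection') shares the
# same project namespace; 'fraud_detection' is just an example name.
@project(name='fraud_detection')
class TrainingFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    TrainingFlow()
```

Per the docstring, running this flow with `--branch my_experiment` would place it on the `test.my_experiment` branch, while `--production` without a branch would use `prod`.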
 @typing.overload
-def
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
+    Specifies the times when the flow should be run when running on a
+    production scheduler.


     Parameters
     ----------
-
-
-
-
-
-
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
     """
     ...

 @typing.overload
-def
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
     """
-    Specifies the
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
+    Specifies the times when the flow should be run when running on a
+    production scheduler.


     Parameters
     ----------
-
-
-
-
-
-
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
     """
     ...

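The corrected `@schedule` docstring now documents the hourly/daily/weekly toggles plus a free-form cron expression. A minimal sketch, assuming `schedule` is importable from `metaflow`; the cron string follows the AWS EventBridge format linked in the docstring and is only an example:

```
from metaflow import FlowSpec, schedule, step

# Run every day at 02:30 on the production scheduler; the 6-field cron
# expression follows the EventBridge format referenced in the docstring.
@schedule(cron='30 2 * * ? *')
class NightlyFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    NightlyFlow()
```

The `timezone` argument applies only to Argo Workflows deployments, as noted above.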
@@ -1715,57 +1782,6 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
     """
     ...

-@typing.overload
-def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the Conda environment for all steps of the flow.
-
-    Use `@conda_base` to set common libraries required by all
-    steps and use `@conda` to specify step-specific additions.
-
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this flow. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables Conda.
-    """
-    ...
-
-@typing.overload
-def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
-    """
-    Specifies the Conda environment for all steps of the flow.
-
-    Use `@conda_base` to set common libraries required by all
-    steps and use `@conda` to specify step-specific additions.
-
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this flow. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables Conda.
-    """
-    ...
-
 @typing.overload
 def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
@@ -1867,6 +1883,49 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
     """
     ...

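The unchanged `@trigger_on_finish` signature visible in this hunk (`flow`, `flows`, `options`) pairs naturally with `@trigger`: the dependency is another deployed flow finishing rather than an external event. A hedged sketch, with the upstream flow name invented purely for illustration and `trigger_on_finish` assumed importable from `metaflow` as in this stub:

```
from metaflow import FlowSpec, step, trigger_on_finish

# Start this flow whenever a deployment of the (hypothetical) flow
# 'UpstreamFlow' finishes on the production scheduler.
@trigger_on_finish(flow='UpstreamFlow')
class ReportingFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    ReportingFlow()
```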
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.
+
+
+    Parameters
+    ----------
+    timeout : int
+        Time, in seconds before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+    pool : str
+        the slot pool this task should run in,
+        slot pools are a way to limit concurrency for certain tasks. (Default:None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of sensor in the Airflow UI
+    bucket_key : Union[str, List[str]]
+        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+        When it's specified as a full s3:// url, please leave `bucket_name` as None
+    bucket_name : str
+        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+        When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+    wildcard_match : bool
+        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+    aws_conn_id : str
+        a reference to the s3 connection on Airflow. (Default: None)
+    verify : bool
+        Whether or not to verify SSL certificates for S3 connection. (Default: None)
+    """
+    ...
+
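The newly added `@airflow_s3_key_sensor` mirrors the external-task sensor but gates the `start` step on the presence of S3 keys. A usage sketch under the same assumptions (top-level import from `metaflow`, placeholder bucket and key, values copied from the documented defaults):

```
from metaflow import FlowSpec, step, airflow_s3_key_sensor

# Wait for a daily export file to land in S3 before 'start' runs; only
# meaningful when the flow is compiled with `airflow create`.
@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode='poke',
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name='wait_for_daily_export',            # placeholder sensor name
    description='Wait for the partner data drop',
    bucket_key='s3://example-bucket/exports/latest.parquet',  # placeholder key
    bucket_name=None,     # not needed when bucket_key is a full s3:// url
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class S3GatedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    S3GatedFlow()
```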
 @typing.overload
 def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
@@ -1908,48 +1967,5 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
     """
     ...

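`@pypi_base`, whose signature is unchanged here, pins flow-wide PyPI packages and a Python version (step-level additions go through `@pypi`). A small sketch, with package pins chosen purely for illustration:

```
from metaflow import FlowSpec, pypi_base, step

# Flow-wide PyPI environment; the package and Python versions below are
# example pins, not recommendations.
@pypi_base(packages={'pandas': '2.1.4'}, python='3.11.5')
class PypiFlow(FlowSpec):
    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    PypiFlow()
```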
-def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
-
-    Parameters
-    ----------
-    timeout : int
-        Time, in seconds before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : str
-        the slot pool this task should run in,
-        slot pools are a way to limit concurrency for certain tasks. (Default:None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of sensor in the Airflow UI
-    external_dag_id : str
-        The dag_id that contains the task you want to wait for.
-    external_task_ids : List[str]
-        The list of task_ids that you want to wait for.
-        If None (default value) the sensor waits for the DAG. (Default: None)
-    allowed_states : List[str]
-        Iterable of allowed states, (Default: ['success'])
-    failed_states : List[str]
-        Iterable of failed or dis-allowed states. (Default: None)
-    execution_delta : datetime.timedelta
-        time difference with the previous execution to look at,
-        the default is the same logical date as the current task or DAG. (Default: None)
-    check_existence: bool
-        Set to True to check if the external task exists or check if
-        the DAG to wait for exists. (Default: True)
-    """
-    ...
-
 pkg_name: str
