ob-metaflow-stubs 6.0.10.12__py2.py3-none-any.whl → 6.0.10.14__py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
Potentially problematic release: this version of ob-metaflow-stubs has been flagged as possibly problematic.
- metaflow-stubs/__init__.pyi +1044 -1044
- metaflow-stubs/cards.pyi +5 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +6 -6
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/events.pyi +2 -2
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +6 -6
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/meta_files.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +3 -3
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +57 -57
- metaflow-stubs/metaflow_git.pyi +2 -2
- metaflow-stubs/mf_extensions/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +5 -5
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +5 -5
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/cards/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/cards/hf_hub_card.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +4 -4
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +5 -5
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +4 -4
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -5
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +4 -4
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +4 -4
- metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/ob_internal.pyi +2 -2
- metaflow-stubs/packaging_sys/__init__.pyi +8 -8
- metaflow-stubs/packaging_sys/backend.pyi +3 -3
- metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
- metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
- metaflow-stubs/packaging_sys/utils.pyi +2 -2
- metaflow-stubs/packaging_sys/v1.pyi +4 -4
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +14 -14
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +3 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +167 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +119 -0
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
- metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
- metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/parsers.pyi +2 -2
- metaflow-stubs/plugins/perimeters.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
- metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
- metaflow-stubs/plugins/secrets/utils.pyi +2 -2
- metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
- metaflow-stubs/profilers/__init__.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +6 -6
- metaflow-stubs/runner/deployer_impl.pyi +3 -3
- metaflow-stubs/runner/metaflow_runner.pyi +3 -3
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +3 -3
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +3 -3
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_options.pyi +4 -4
- metaflow-stubs/user_configs/config_parameters.pyi +5 -5
- metaflow-stubs/user_decorators/__init__.pyi +2 -2
- metaflow-stubs/user_decorators/common.pyi +2 -2
- metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
- metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
- metaflow-stubs/user_decorators/user_flow_decorator.pyi +5 -5
- metaflow-stubs/user_decorators/user_step_decorator.pyi +6 -6
- {ob_metaflow_stubs-6.0.10.12.dist-info → ob_metaflow_stubs-6.0.10.14.dist-info}/METADATA +1 -1
- ob_metaflow_stubs-6.0.10.14.dist-info/RECORD +266 -0
- ob_metaflow_stubs-6.0.10.12.dist-info/RECORD +0 -265
- {ob_metaflow_stubs-6.0.10.12.dist-info → ob_metaflow_stubs-6.0.10.14.dist-info}/WHEEL +0 -0
- {ob_metaflow_stubs-6.0.10.12.dist-info → ob_metaflow_stubs-6.0.10.14.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.18.
-# Generated on 2025-
+# MF version: 2.18.10.1+obcheckpoint(0.2.8);ob(v1) #
+# Generated on 2025-10-08T21:13:44.622359 #
 ######################################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import datetime
     import typing
+    import datetime
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

@@ -39,8 +39,8 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
 from .user_decorators.user_step_decorator import StepMutator as StepMutator
 from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
 from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
-from . import events as events
 from . import cards as cards
+from . import events as events
 from . import tuple_util as tuple_util
 from . import metaflow_git as metaflow_git
 from . import runner as runner
@@ -48,9 +48,9 @@ from . import plugins as plugins
 from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
-from .plugins.parsers import yaml_parser as yaml_parser
 from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+from .plugins.parsers import yaml_parser as yaml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
 from . import client as client
 from .client.core import namespace as namespace
@@ -170,392 +170,105 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...

 @typing.overload
-def
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-
-    > Examples
-
-    - Saving Checkpoints
-
-    ```python
-    @checkpoint
-    @step
-    def train(self):
-        model = create_model(self.parameters, checkpoint_path = None)
-        for i in range(self.epochs):
-            # some training logic
-            loss = model.train(self.dataset)
-            if i % 10 == 0:
-                model.save(
-                    current.checkpoint.directory,
-                )
-                # saves the contents of the `current.checkpoint.directory` as a checkpoint
-                # and returns a reference dictionary to the checkpoint saved in the datastore
-                self.latest_checkpoint = current.checkpoint.save(
-                    name="epoch_checkpoint",
-                    metadata={
-                        "epoch": i,
-                        "loss": loss,
-                    }
-                )
-    ```
+    Specifies a timeout for your step.

-
+    This decorator is useful if this step may hang indefinitely.

-
-
-
-    @step
-    def train(self):
-        # Assume that the task has restarted and the previous attempt of the task
-        # saved a checkpoint
-        checkpoint_path = None
-        if current.checkpoint.is_loaded:  # Check if a checkpoint is loaded
-            print("Loaded checkpoint from the previous attempt")
-            checkpoint_path = current.checkpoint.directory
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.

-
-
-        ...
-    ```
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


     Parameters
     ----------
-
-
-
-
-
-
-        - "fresh": Loads the lastest checkpoint created within the running Task.
-        This mode helps loading checkpoints across various retry attempts of the same task.
-        With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
-        created within the task will be loaded when the task is retries execution on failure.
-
-    temp_dir_root : str, default: None
-        The root directory under which `current.checkpoint.directory` will be created.
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
     """
     ...

 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
     """
-
-
-    > Examples
-
-    - Saving Checkpoints
-
-    ```python
-    @checkpoint
-    @step
-    def train(self):
-        model = create_model(self.parameters, checkpoint_path = None)
-        for i in range(self.epochs):
-            # some training logic
-            loss = model.train(self.dataset)
-            if i % 10 == 0:
-                model.save(
-                    current.checkpoint.directory,
-                )
-                # saves the contents of the `current.checkpoint.directory` as a checkpoint
-                # and returns a reference dictionary to the checkpoint saved in the datastore
-                self.latest_checkpoint = current.checkpoint.save(
-                    name="epoch_checkpoint",
-                    metadata={
-                        "epoch": i,
-                        "loss": loss,
-                    }
-                )
-    ```
+    Specifies a timeout for your step.

-
+    This decorator is useful if this step may hang indefinitely.

-
-
-
-    @step
-    def train(self):
-        # Assume that the task has restarted and the previous attempt of the task
-        # saved a checkpoint
-        checkpoint_path = None
-        if current.checkpoint.is_loaded:  # Check if a checkpoint is loaded
-            print("Loaded checkpoint from the previous attempt")
-            checkpoint_path = current.checkpoint.directory
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.

-
-
-        ...
-    ```
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


     Parameters
     ----------
-
-
-
-
-
-
-        - "fresh": Loads the lastest checkpoint created within the running Task.
-        This mode helps loading checkpoints across various retry attempts of the same task.
-        With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
-        created within the task will be loaded when the task is retries execution on failure.
-
-    temp_dir_root : str, default: None
-        The root directory under which `current.checkpoint.directory` will be created.
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
     """
     ...

 @typing.overload
-def
+def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-
-
-    > Examples
-    - Saving Models
-    ```python
-    @model
-    @step
-    def train(self):
-        # current.model.save returns a dictionary reference to the model saved
-        self.my_model = current.model.save(
-            path_to_my_model,
-            label="my_model",
-            metadata={
-                "epochs": 10,
-                "batch-size": 32,
-                "learning-rate": 0.001,
-            }
-        )
-        self.next(self.test)
-
-    @model(load="my_model")
-    @step
-    def test(self):
-        # `current.model.loaded` returns a dictionary of the loaded models
-        # where the key is the name of the artifact and the value is the path to the model
-        print(os.listdir(current.model.loaded["my_model"]))
-        self.next(self.end)
-    ```
-
-    - Loading models
-    ```python
-    @step
-    def train(self):
-        # current.model.load returns the path to the model loaded
-        checkpoint_path = current.model.load(
-            self.checkpoint_key,
-        )
-        model_path = current.model.load(
-            self.model,
-        )
-        self.next(self.test)
-    ```
-
-
-    Parameters
-    ----------
-    load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
-        Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
-        These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
-        If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
-        the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
-        If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
-    temp_dir_root : str, default: None
-        The root directory under which `current.model.loaded` will store loaded models
+    Internal decorator to support Fast bakery
     """
     ...

 @typing.overload
-def
+def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Internal decorator to support Fast bakery
+    """
     ...

 @typing.overload
-def
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
     ...

-
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
     """
-
-
-
-
-
-
-
-
-
-
-            path_to_my_model,
-            label="my_model",
-            metadata={
-                "epochs": 10,
-                "batch-size": 32,
-                "learning-rate": 0.001,
-            }
-        )
-        self.next(self.test)
-
-    @model(load="my_model")
-    @step
-    def test(self):
-        # `current.model.loaded` returns a dictionary of the loaded models
-        # where the key is the name of the artifact and the value is the path to the model
-        print(os.listdir(current.model.loaded["my_model"]))
-        self.next(self.end)
-    ```
-
-    - Loading models
-    ```python
-    @step
-    def train(self):
-        # current.model.load returns the path to the model loaded
-        checkpoint_path = current.model.load(
-            self.checkpoint_key,
-        )
-        model_path = current.model.load(
-            self.model,
-        )
-        self.next(self.test)
-    ```
-
-
-    Parameters
-    ----------
-    load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
-        Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
-        These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
-        If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
-        the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
-        If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
-    temp_dir_root : str, default: None
-        The root directory under which `current.model.loaded` will store loaded models
-    """
-    ...
-
-def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on DGX cloud.
-
-
-    Parameters
-    ----------
-    gpu : int
-        Number of GPUs to use.
-    gpu_type : str
-        Type of Nvidia GPU to use.
-    queue_timeout : int
-        Time to keep the job in NVCF's queue.
-    """
-    ...
-
-@typing.overload
-def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
-
-
-    Parameters
-    ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
-    """
-    ...
-
-@typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
-    """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
-
-
-    Parameters
-    ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
-    """
-    ...
-
-@typing.overload
-def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
-
-
-    Parameters
-    ----------
-    sources : List[Union[str, Dict[str, Any]]], default: []
-        List of secret specs, defining how the secrets are to be retrieved
-    role : str, optional, default: None
-        Role to use for fetching secrets
-    """
-    ...
-
-@typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
-    """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
-
-
-    Parameters
-    ----------
-    sources : List[Union[str, Dict[str, Any]]], default: []
-        List of secret specs, defining how the secrets are to be retrieved
-    role : str, optional, default: None
-        Role to use for fetching secrets
-    """
-    ...
-
-def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
-    It exists to make it easier for users to know that this decorator should only be used with
-    a Neo Cloud like Nebius. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    `@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+    It exists to make it easier for users to know that this decorator should only be used with
+    a Neo Cloud like CoreWeave. The underlying mechanics of the decorator is the same as the `@s3_proxy`:


     Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
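The hunk above replaces the old `@checkpoint`/`@model` docstrings at these positions with the `@timeout` documentation. As a quick illustration of the documented semantics (time units are additive, and a timeout composes with `@retry` and `@catch`), here is a minimal sketch against the public Metaflow API; the flow and step names are illustrative, not from this package:

```python
from metaflow import FlowSpec, step, timeout, retry, catch

class SlowFlow(FlowSpec):
    # Effective timeout is additive: 1 hour + 1 minute, per the docstring above.
    # If the step times out, @retry reruns it; @catch records the final failure
    # in self.train_error so the flow can continue.
    @catch(var="train_error")
    @retry(times=2)
    @timeout(hours=1, minutes=1)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SlowFlow()
```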
@@ -648,186 +361,117 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
     """
     ...

+def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on DGX cloud.
+
+
+    Parameters
+    ----------
+    gpu : int
+        Number of GPUs to use.
+    gpu_type : str
+        Type of Nvidia GPU to use.
+    queue_timeout : int
+        Time to keep the job in NVCF's queue.
+    """
+    ...
+
 @typing.overload
-def
+def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-
-
+    A simple decorator that demonstrates using CardDecoratorInjector
+    to inject a card and render simple markdown content.
     """
     ...

 @typing.overload
-def
+def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
     """
-
-
+    A simple decorator that demonstrates using CardDecoratorInjector
+    to inject a card and render simple markdown content.
     """
     ...

 @typing.overload
-def
+def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the
-    to a step needs to be retried.
+    Specifies the resources needed when executing this step.

-
-
-    it is advisable to annotate it with `@retry(times=0)`.
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).

-
-
-
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.


     Parameters
     ----------
-
-        Number of
-
-        Number of
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, optional, default None
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...

 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-    Specifies the
-    to a step needs to be retried.
+    Specifies the resources needed when executing this step.

-
-
-    it is advisable to annotate it with `@retry(times=0)`.
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).

-
-
-
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.


     Parameters
     ----------
-
-        Number of
-
-        Number of
-
-
-
-
-
-
-
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
-    Parameters
-    ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
-    """
-    ...
-
-@typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
-    """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
-    Parameters
-    ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Internal decorator to support Fast bakery
-    """
-    ...
-
-@typing.overload
-def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Internal decorator to support Fast bakery
-    """
-    ...
-
-def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on DGX cloud.
-
-
-    Parameters
-    ----------
-    gpu : int
-        Number of GPUs to use.
-    gpu_type : str
-        Type of Nvidia GPU to use.
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, optional, default None
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...

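The `@resources` docstring rewritten above decouples resource requests from the compute layer. A minimal sketch of the documented usage, again with illustrative names:

```python
from metaflow import FlowSpec, step, resources

class HeavyFlow(FlowSpec):
    # These requests are honored by whichever compute layer runs the step,
    # e.g. `python heavy.py run --with batch` or `--with kubernetes`.
    @resources(cpu=4, memory=16384, gpu=1)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    HeavyFlow()
```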
@@ -881,8 +525,13 @@ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card
     """
     ...

-def
+def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
+    `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+    It exists to make it easier for users to know that this decorator should only be used with
+    a Neo Cloud like Nebius. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
+
+
     Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
     for S3 read and write requests.

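The hunk above relocates `@nebius_s3_proxy`, an Outerbounds extension exposed at the top level of this stub. A usage sketch, assuming an ob-metaflow installation where the decorator is importable exactly as typed here; all keyword arguments are optional per the stub signature:

```python
from metaflow import FlowSpec, step
from metaflow import nebius_s3_proxy  # Outerbounds extension, per this stub

class ProxiedFlow(FlowSpec):
    # Routes the step's S3 traffic through the local Nebius proxy service;
    # `debug=True` is assumed here only to surface proxy diagnostics.
    @nebius_s3_proxy(debug=True)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProxiedFlow()
```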
@@ -941,208 +590,609 @@ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typin
     ...
 
 @typing.overload
-def
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
 
-
-
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
 
-
-
-
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
 
 
     Parameters
     ----------
-
-        Number of
-
-        Number of
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...
 
 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Specifies the
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
 
-
-
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
 
-
-
-
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
 
 
     Parameters
     ----------
-
-        Number of
-
-        Number of
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...
 
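A minimal sketch of the documented `@retry`/`@catch` interplay, assuming only what the docstrings above state (the simulated failure is illustrative):

```python
import random

from metaflow import FlowSpec, step, retry, catch


class RetryDemoFlow(FlowSpec):

    # Retry transient failures up to 3 times, waiting 2 minutes between attempts;
    # if every attempt fails, @catch records the exception and the flow continues.
    @catch(var="fetch_error")
    @retry(times=3, minutes_between_retries=2)
    @step
    def start(self):
        if random.random() < 0.5:  # simulate a flaky network call
            raise RuntimeError("transient network error")
        self.data = "ok"
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "fetch_error", None):
            print("start failed after all retries:", self.fetch_error)


if __name__ == "__main__":
    RetryDemoFlow()
```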
 @typing.overload
-def
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
+
+
+    Parameters
+    ----------
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    role : str, optional, default: None
+        Role to use for fetching secrets
     """
     ...
 
 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
-
-
-    A simple decorator that demonstrates using CardDecoratorInjector
-    to inject a card and render simple markdown content.
-    """
+@typing.overload
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-
-def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
     """
-    Specifies
-
-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
 
 
     Parameters
     ----------
-
-
-
-
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    role : str, optional, default: None
+        Role to use for fetching secrets
     """
     ...
 
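A short sketch of `@secrets` as documented above; the secret name and the environment-variable key it injects are hypothetical and depend on how the secret is defined:

```python
import os

from metaflow import FlowSpec, step, secrets


class SecretsDemoFlow(FlowSpec):

    # Fetch the named secret before the step runs; its keys are injected
    # as environment variables. "my-db-credentials" is illustrative.
    @secrets(sources=["my-db-credentials"])
    @step
    def start(self):
        password = os.environ.get("DB_PASSWORD")  # key depends on the secret's contents
        print("got a password of length", len(password or ""))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsDemoFlow()
```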
 @typing.overload
-def
-
-
+def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Enables loading / saving of models within a step.
+
+    > Examples
+    - Saving Models
+    ```python
+    @model
+    @step
+    def train(self):
+        # current.model.save returns a dictionary reference to the model saved
+        self.my_model = current.model.save(
+            path_to_my_model,
+            label="my_model",
+            metadata={
+                "epochs": 10,
+                "batch-size": 32,
+                "learning-rate": 0.001,
+            }
+        )
+        self.next(self.test)
+
+    @model(load="my_model")
+    @step
+    def test(self):
+        # `current.model.loaded` returns a dictionary of the loaded models
+        # where the key is the name of the artifact and the value is the path to the model
+        print(os.listdir(current.model.loaded["my_model"]))
+        self.next(self.end)
+    ```
+
+    - Loading models
+    ```python
+    @step
+    def train(self):
+        # current.model.load returns the path to the model loaded
+        checkpoint_path = current.model.load(
+            self.checkpoint_key,
+        )
+        model_path = current.model.load(
+            self.model,
+        )
+        self.next(self.test)
+    ```
+
+
+    Parameters
+    ----------
+    load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+        Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+        The artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+        If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
+        the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+        If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+    temp_dir_root : str, default: None
+        The root directory under which `current.model.loaded` will store loaded models
+    """
+    ...
+
+@typing.overload
+def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
+    """
+    Enables loading / saving of models within a step.
+
+    > Examples
+    - Saving Models
+    ```python
+    @model
+    @step
+    def train(self):
+        # current.model.save returns a dictionary reference to the model saved
+        self.my_model = current.model.save(
+            path_to_my_model,
+            label="my_model",
+            metadata={
+                "epochs": 10,
+                "batch-size": 32,
+                "learning-rate": 0.001,
+            }
+        )
+        self.next(self.test)
+
+    @model(load="my_model")
+    @step
+    def test(self):
+        # `current.model.loaded` returns a dictionary of the loaded models
+        # where the key is the name of the artifact and the value is the path to the model
+        print(os.listdir(current.model.loaded["my_model"]))
+        self.next(self.end)
+    ```
+
+    - Loading models
+    ```python
+    @step
+    def train(self):
+        # current.model.load returns the path to the model loaded
+        checkpoint_path = current.model.load(
+            self.checkpoint_key,
+        )
+        model_path = current.model.load(
+            self.model,
+        )
+        self.next(self.test)
+    ```
+
+
+    Parameters
+    ----------
+    load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+        Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+        The artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+        If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
+        the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+        If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+    temp_dir_root : str, default: None
+        The root directory under which `current.model.loaded` will store loaded models
+    """
+    ...
+
+def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on DGX cloud.
+
+
+    Parameters
+    ----------
+    gpu : int
+        Number of GPUs to use.
+    gpu_type : str
+        Type of Nvidia GPU to use.
+    """
+    ...
+
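A hedged sketch of `@nvct` based solely on the signature above; the GPU type string is illustrative, and the decorator is assumed to be importable from `metaflow` like other extensions:

```python
from metaflow import FlowSpec, step, nvct  # import path assumed for this extension


class DGXTrainFlow(FlowSpec):

    # Run this step on DGX cloud with one GPU; "H100" is an illustrative gpu_type.
    @nvct(gpu=1, gpu_type="H100")
    @step
    def start(self):
        import subprocess
        # Print the GPU inventory visible to the task.
        print(subprocess.run(["nvidia-smi"], capture_output=True, text=True).stdout)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DGXTrainFlow()
```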
+@typing.overload
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
+    """
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
+    """
+    ...
+
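A minimal sketch of the `@conda_base`/`@conda` layering described above; package versions are illustrative:

```python
from metaflow import FlowSpec, step, conda, conda_base


# Flow-wide baseline environment; the step-level @conda below extends it.
@conda_base(python="3.10.11", packages={"numpy": "1.26.4"})
class CondaDemoFlow(FlowSpec):

    # Step-specific override: add pandas on top of the flow-level packages.
    @conda(packages={"pandas": "2.1.4"})
    @step
    def start(self):
        import pandas as pd
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaDemoFlow()
```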
+@typing.overload
+def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Enables checkpointing for a step.
+
+    > Examples
+
+    - Saving Checkpoints
+
+    ```python
+    @checkpoint
+    @step
+    def train(self):
+        model = create_model(self.parameters, checkpoint_path = None)
+        for i in range(self.epochs):
+            # some training logic
+            loss = model.train(self.dataset)
+            if i % 10 == 0:
+                model.save(
+                    current.checkpoint.directory,
+                )
+                # saves the contents of the `current.checkpoint.directory` as a checkpoint
+                # and returns a reference dictionary to the checkpoint saved in the datastore
+                self.latest_checkpoint = current.checkpoint.save(
+                    name="epoch_checkpoint",
+                    metadata={
+                        "epoch": i,
+                        "loss": loss,
+                    }
+                )
+    ```
+
+    - Using Loaded Checkpoints
+
+    ```python
+    @retry(times=3)
+    @checkpoint
+    @step
+    def train(self):
+        # Assume that the task has restarted and the previous attempt of the task
+        # saved a checkpoint
+        checkpoint_path = None
+        if current.checkpoint.is_loaded:  # Check if a checkpoint is loaded
+            print("Loaded checkpoint from the previous attempt")
+            checkpoint_path = current.checkpoint.directory
+
+        model = create_model(self.parameters, checkpoint_path = checkpoint_path)
+        for i in range(self.epochs):
+            ...
+    ```
+
+
+    Parameters
+    ----------
+    load_policy : str, default: "fresh"
+        The policy for loading the checkpoint. The following policies are supported:
+        - "eager": Loads the latest available checkpoint within the namespace.
+          With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+          will be loaded at the start of the task.
+        - "none": Do not load any checkpoint
+        - "fresh": Loads the latest checkpoint created within the running Task.
+          This mode helps loading checkpoints across various retry attempts of the same task.
+          With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+          created within the task will be loaded when the task retries execution on failure.
+
+    temp_dir_root : str, default: None
+        The root directory under which `current.checkpoint.directory` will be created.
+    """
+    ...
+
+@typing.overload
+def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
+    """
+    Enables checkpointing for a step.
+
+    > Examples
+
+    - Saving Checkpoints
+
+    ```python
+    @checkpoint
+    @step
+    def train(self):
+        model = create_model(self.parameters, checkpoint_path = None)
+        for i in range(self.epochs):
+            # some training logic
+            loss = model.train(self.dataset)
+            if i % 10 == 0:
+                model.save(
+                    current.checkpoint.directory,
+                )
+                # saves the contents of the `current.checkpoint.directory` as a checkpoint
+                # and returns a reference dictionary to the checkpoint saved in the datastore
+                self.latest_checkpoint = current.checkpoint.save(
+                    name="epoch_checkpoint",
+                    metadata={
+                        "epoch": i,
+                        "loss": loss,
+                    }
+                )
+    ```
+
+    - Using Loaded Checkpoints
+
+    ```python
+    @retry(times=3)
+    @checkpoint
+    @step
+    def train(self):
+        # Assume that the task has restarted and the previous attempt of the task
+        # saved a checkpoint
+        checkpoint_path = None
+        if current.checkpoint.is_loaded:  # Check if a checkpoint is loaded
+            print("Loaded checkpoint from the previous attempt")
+            checkpoint_path = current.checkpoint.directory
+
+        model = create_model(self.parameters, checkpoint_path = checkpoint_path)
+        for i in range(self.epochs):
+            ...
+    ```
+
+
+    Parameters
+    ----------
+    load_policy : str, default: "fresh"
+        The policy for loading the checkpoint. The following policies are supported:
+        - "eager": Loads the latest available checkpoint within the namespace.
+          With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+          will be loaded at the start of the task.
+        - "none": Do not load any checkpoint
+        - "fresh": Loads the latest checkpoint created within the running Task.
+          This mode helps loading checkpoints across various retry attempts of the same task.
+          With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+          created within the task will be loaded when the task retries execution on failure.
+
+    temp_dir_root : str, default: None
+        The root directory under which `current.checkpoint.directory` will be created.
+    """
+    ...
+
 @typing.overload
-def
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that the step will succeed under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+
+
+    Parameters
+    ----------
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
+    """
     ...
 
-
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
     """
-    Specifies the
+    Specifies that the step will succeed under all circumstances.
 
-
-
-
-
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
+    """
+    ...
+
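A minimal sketch of `@catch` as documented above; the deliberate failure is illustrative:

```python
from metaflow import FlowSpec, step, catch


class CatchDemoFlow(FlowSpec):

    # The step "succeeds" even though it raises; the exception is stored
    # in the artifact self.compute_failed and the flow continues.
    @catch(var="compute_failed", print_exception=True)
    @step
    def start(self):
        raise ValueError("boom")
        self.next(self.end)

    @step
    def end(self):
        if self.compute_failed:
            print("start raised:", self.compute_failed)


if __name__ == "__main__":
    CatchDemoFlow()
```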
+def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    This decorator is used to run Ollama APIs as Metaflow task sidecars.
+
+    User code call
+    --------------
+    @ollama(
+        models=[...],
+        ...
+    )
+
+    Valid backend options
+    ---------------------
+    - 'local': Run as a separate process on the local task machine.
+    - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+    - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+    Valid model options
+    -------------------
+    Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
+
+
+    Parameters
+    ----------
+    models: list[str]
+        List of Ollama containers running models in sidecars.
+    backend: str
+        Determines where and how to run the Ollama process.
+    force_pull: bool
+        Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+    cache_update_policy: str
+        Cache update policy: "auto", "force", or "never".
+    force_cache_update: bool
+        Simple override for "force" cache update policy.
+    debug: bool
+        Whether to turn on verbose debugging logs.
+    circuit_breaker_config: dict
+        Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+    timeout_config: dict
+        Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
     """
     ...
 
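A hedged sketch of `@ollama` based on the docstring above; it assumes the remaining parameters have runtime defaults (the stub lists them without defaults) and that the sidecar serves the standard Ollama HTTP API on localhost:11434:

```python
from metaflow import FlowSpec, step, ollama  # import path assumed for this extension


class OllamaDemoFlow(FlowSpec):

    # Run an Ollama sidecar serving llama3.2 next to this task;
    # only the 'local' backend is listed as implemented above.
    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        import requests  # assumes requests is available in the task image
        r = requests.post(
            "http://localhost:11434/api/generate",
            json={"model": "llama3.2", "prompt": "Say hi", "stream": False},
        )
        print(r.json().get("response"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaDemoFlow()
```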
 @typing.overload
-def
+def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorator types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorator types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the PyPI packages for the step.
 
-
+    Information in this decorator will augment any
+    attributes set in the `@pypi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...
 
 @typing.overload
-def
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-
+    Specifies the PyPI packages for the step.
 
-
+    Information in this decorator will augment any
+    attributes set in the `@pypi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...
 
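A minimal sketch of the documented `@pypi` usage; the package version is illustrative:

```python
from metaflow import FlowSpec, step, pypi


class PypiDemoFlow(FlowSpec):

    # Install a step-specific PyPI package on top of whatever @pypi_base defines.
    @pypi(packages={"requests": "2.31.0"}, python="3.10.11")
    @step
    def start(self):
        import requests
        print(requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiDemoFlow()
```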
@@ -1225,56 +1275,121 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
     qos: str, default: Burstable
         Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
 
-    security_context: Dict[str, Any], optional, default None
-        Container security context. Applies to the task container. Allows the following keys:
-        - privileged: bool, optional, default None
-        - allow_privilege_escalation: bool, optional, default None
-        - run_as_user: int, optional, default None
-        - run_as_group: int, optional, default None
-        - run_as_non_root: bool, optional, default None
+    security_context: Dict[str, Any], optional, default None
+        Container security context. Applies to the task container. Allows the following keys:
+        - privileged: bool, optional, default None
+        - allow_privilege_escalation: bool, optional, default None
+        - run_as_user: int, optional, default None
+        - run_as_group: int, optional, default None
+        - run_as_non_root: bool, optional, default None
+    """
+    ...
+
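A hedged sketch of the `security_context` option on `@kubernetes`; only the documented keys above are used, and the values are illustrative:

```python
from metaflow import FlowSpec, step, kubernetes


class HardenedFlow(FlowSpec):

    # Run the task container as a non-root user with privilege escalation disabled.
    @kubernetes(
        cpu=1,
        memory=4096,
        security_context={
            "run_as_non_root": True,
            "run_as_user": 1000,
            "allow_privilege_escalation": False,
        },
    )
    @step
    def start(self):
        import os
        print("running as uid", os.getuid())
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    HardenedFlow()
```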
+@typing.overload
+def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+    Parameters
+    ----------
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
+    """
+    ...
+
+@typing.overload
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+    """
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+    Parameters
+    ----------
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
     """
     ...
 
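A minimal sketch of attaching multiple `@card` decorators to one step, as the docstring above allows; the card id and markdown content are illustrative:

```python
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown


class CardDemoFlow(FlowSpec):

    # Two cards on one step: the default card plus a blank card
    # identified by id="notes" that we fill at runtime.
    @card(type="blank", id="notes", timeout=45)
    @card
    @step
    def start(self):
        current.card["notes"].append(Markdown("# Run notes\nEverything looks fine."))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardDemoFlow()
```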
-def
+def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
+    for S3 read and write requests.
 
-
-
-
-        models=[...],
-        ...
-    )
+    This decorator requires an integration in the Outerbounds platform that
+    points to an external bucket. It affects S3 operations performed via
+    Metaflow's `get_aws_client` and `S3` within a `@step`.
 
-
-
-
-
-
+    Read operations
+    ---------------
+    All read operations pass through the proxy. If an object does not already
+    exist in the external bucket, it is cached there. For example, if code reads
+    from buckets `FOO` and `BAR` using the `S3` interface, objects from both
+    buckets are cached in the external bucket.
 
-
-
-
+    During task execution, all S3‑related read requests are routed through the
+    proxy:
+    - If the object is present in the external object store, the proxy
+      streams it directly from there without accessing the requested origin
+      bucket.
+    - If the object is not present in the external storage, the proxy
+      fetches it from the requested bucket, caches it in the external
+      storage, and streams the response from the origin bucket.
+
+    Warning
+    -------
+    All READ operations (e.g., GetObject, HeadObject) pass through the external
+    bucket regardless of the bucket specified in user code. Even
+    `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
+    external bucket cache.
+
+    Write operations
+    ----------------
+    Write behavior is controlled by the `write_mode` parameter, which determines
+    whether writes also persist objects in the cache.
+
+    `write_mode` values:
+    - `origin-and-cache`: objects are written both to the cache and to their
+      intended origin bucket.
+    - `origin`: objects are written only to their intended origin bucket.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
-
-        Whether to turn on verbose debugging logs.
-    circuit_breaker_config: dict
-        Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
-    timeout_config: dict
-        Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
+    integration_name : str, optional
+        [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
+        that holds the configuration for the external, S3‑compatible object
+        storage bucket. If not specified, the only available S3 proxy
+        integration in the namespace is used (fails if multiple exist).
+    write_mode : str, optional
+        Controls whether writes also go to the external bucket.
+        - `origin` (default)
+        - `origin-and-cache`
+    debug : bool, optional
+        Enables debug logging for proxy operations.
     """
     ...
 
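A hedged sketch of the write-through behavior documented above; the integration and bucket names are hypothetical, and the decorator is assumed to be importable from `metaflow`:

```python
from metaflow import FlowSpec, step, s3_proxy  # import path assumed


class CachedWriteFlow(FlowSpec):

    # Cache reads in the external bucket and also persist writes there.
    @s3_proxy(integration_name="external-cache-bucket", write_mode="origin-and-cache")
    @step
    def start(self):
        from metaflow import S3
        with S3(s3root="s3://origin-bucket/results/") as s3:  # bucket illustrative
            s3.put("summary.txt", "hello")  # written to origin and to the cache
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CachedWriteFlow()
```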
@@ -1395,169 +1510,147 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope:
     """
     ...
 
-
+@typing.overload
+def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-    It exists to make it easier for users to know that this decorator should only be used with
-    a Neo Cloud like CoreWeave. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
-
-
-    Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
-    for S3 read and write requests.
-
-    This decorator requires an integration in the Outerbounds platform that
-    points to an external bucket. It affects S3 operations performed via
-    Metaflow's `get_aws_client` and `S3` within a `@step`.
-
-    Read operations
-    ---------------
-    All read operations pass through the proxy. If an object does not already
-    exist in the external bucket, it is cached there. For example, if code reads
-    from buckets `FOO` and `BAR` using the `S3` interface, objects from both
-    buckets are cached in the external bucket.
-
-    During task execution, all S3‑related read requests are routed through the
-    proxy:
-    - If the object is present in the external object store, the proxy
-      streams it directly from there without accessing the requested origin
-      bucket.
-    - If the object is not present in the external storage, the proxy
-      fetches it from the requested bucket, caches it in the external
-      storage, and streams the response from the origin bucket.
-
-    Warning
-    -------
-    All READ operations (e.g., GetObject, HeadObject) pass through the external
-    bucket regardless of the bucket specified in user code. Even
-    `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
-    external bucket cache.
-
-    Write operations
-    ----------------
-    Write behavior is controlled by the `write_mode` parameter, which determines
-    whether writes also persist objects in the cache.
+    Specifies the flow(s) that this flow depends on.
 
-
-
-
-
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully
 
+    Additionally, you can specify project aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
 
-
-
-
-
-
-        storage bucket. If not specified, the only available S3 proxy
-        integration in the namespace is used (fails if multiple exist).
-    write_mode : str, optional
-        Controls whether writes also go to the external bucket.
-        - `origin` (default)
-        - `origin-and-cache`
-    debug : bool, optional
-        Enables debug logging for proxy operations.
-    """
-    ...
-
-@typing.overload
-def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies the PyPI packages for the step.
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
 
-
-
-
-
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
 
 
     Parameters
     ----------
-
-
-
-
-
-
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
 @typing.overload
-def
-    ...
-
-@typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-def
+def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the
+    Specifies the flow(s) that this flow depends on.
 
-
-
-
-
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully
+
+    Additionally, you can specify project aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
 
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
 
-
-
-
-
-
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
-@typing.overload
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
 
 
     Parameters
     ----------
-
-
-
-
-
-
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
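A short sketch of a downstream flow wired with `@trigger_on_finish`; the upstream flow name is illustrative, and the `current.trigger` access assumes the standard Metaflow events API:

```python
from metaflow import FlowSpec, step, trigger_on_finish, current


# Deploy-time wiring: run this flow whenever FooFlow finishes successfully.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # When event-triggered, current.trigger exposes the upstream run.
        if current.trigger:
            print("triggered by", current.trigger.run.pathspec)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```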
-
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
+    The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as flow decorators. Adding more than one decorator will ensure that the `start` step
+    starts only after all sensors finish.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
-
-
+    timeout : int
+        Time, in seconds before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        allow progressively longer waits between pokes by using exponential backoff algorithm. (Default: True)
+    pool : str
+        the slot pool this task should run in,
+        slot pools are a way to limit concurrency for certain tasks. (Default: None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of sensor in the Airflow UI
+    bucket_key : Union[str, List[str]]
+        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+        When it's specified as a full s3:// url, please leave `bucket_name` as None
+    bucket_name : str
+        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+        When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+    wildcard_match : bool
+        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+    aws_conn_id : str
+        a reference to the s3 connection on Airflow. (Default: None)
+    verify : bool
+        Whether or not to verify SSL certificates for S3 connection. (Default: None)
     """
     ...
 
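A hedged sketch of `@airflow_s3_key_sensor`; the bucket/key are illustrative, and the omitted parameters are assumed to fall back to the defaults listed in the docstring above (the stub signature shows them without defaults):

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor  # import path assumed


# When compiled with `airflow create`, block `start` until the object lands.
@airflow_s3_key_sensor(
    bucket_key="s3://my-landing-bucket/daily/input.csv",  # illustrative
    name="wait_for_input",
    description="Block until the daily input file lands",
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorGatedFlow()
```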
@@ -1647,130 +1740,139 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
     """
     ...

-
+@typing.overload
+def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+    Specifies the event(s) that this flow depends on.

+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    external_dag_id : str
-        The dag_id that contains the task you want to wait for.
-    external_task_ids : List[str]
-        The list of task_ids that you want to wait for.
-        If None (default value) the sensor waits for the DAG. (Default: None)
-    allowed_states : List[str]
-        Iterable of allowed states, (Default: ['success'])
-    failed_states : List[str]
-        Iterable of failed or dis-allowed states. (Default: None)
-    execution_delta : datetime.timedelta
-        time difference with the previous execution to look at,
-        the default is the same logical date as the current task or DAG. (Default: None)
-    check_existence: bool
-        Set to True to check if the external task exists or check if
-        the DAG to wait for exists. (Default: True)
-    """
-    ...
-
-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+                     {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```


     Parameters
     ----------
-
-
-
-
-
-
-    exponential_backoff : bool
-        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : str
-        the slot pool this task should run in,
-        slot pools are a way to limit concurrency for certain tasks. (Default:None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of sensor in the Airflow UI
-    bucket_key : Union[str, List[str]]
-        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
-        When it's specified as a full s3:// url, please leave `bucket_name` as None
-    bucket_name : str
-        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
-        When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
-    wildcard_match : bool
-        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
-    aws_conn_id : str
-        a reference to the s3 connection on Airflow. (Default: None)
-    verify : bool
-        Whether or not to verify SSL certificates for S3 connection. (Default: None)
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...

 @typing.overload
-def
+def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the
+    Specifies the event(s) that this flow depends on.
+
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
+
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+                     {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```

-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.

     Parameters
     ----------
-
-
-
-
-
-
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...

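The `@trigger` docstring added here centers on mapping event payload fields to flow parameters. A minimal sketch of that pattern; the event name, payload field, and flow are illustrative:

```python
from metaflow import FlowSpec, Parameter, step, trigger


# Once deployed to a production orchestrator, the flow starts whenever
# the 'data_updated' event fires; the 'path' parameter is filled from
# the payload field 'file_path'.
@trigger(event={'name': 'data_updated', 'parameters': {'path': 'file_path'}})
class EventDrivenFlow(FlowSpec):
    path = Parameter('path', default='s3://example-bucket/latest')

    @step
    def start(self):
        print(f"triggered with path={self.path}")
        self.next(self.end)

    @step
    def end(self):
        pass
```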
-
-def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
+    The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.

-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.

     Parameters
     ----------
-
-
-
-
-
-
+    timeout : int
+        Time, in seconds, before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+    pool : str
+        The slot pool this task should run in;
+        slot pools are a way to limit concurrency for certain tasks. (Default: None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow.
+    description : str
+        Description of the sensor in the Airflow UI.
+    external_dag_id : str
+        The dag_id that contains the task you want to wait for.
+    external_task_ids : List[str]
+        The list of task_ids that you want to wait for.
+        If None (default value) the sensor waits for the DAG. (Default: None)
+    allowed_states : List[str]
+        Iterable of allowed states. (Default: ['success'])
+    failed_states : List[str]
+        Iterable of failed or dis-allowed states. (Default: None)
+    execution_delta : datetime.timedelta
+        Time difference with the previous execution to look at;
+        the default is the same logical date as the current task or DAG. (Default: None)
+    check_existence : bool
+        Set to True to check if the external task exists or check if
+        the DAG to wait for exists. (Default: True)
     """
     ...

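A matching sketch for `@airflow_external_task_sensor`; the DAG and task ids are illustrative, and the parameters not shown are assumed to take the documented defaults:

```python
from metaflow import FlowSpec, step, airflow_external_task_sensor


# Holds `start` until task 'publish' of the 'daily_etl' DAG reaches a
# success state for the same logical date.
@airflow_external_task_sensor(
    external_dag_id='daily_etl',
    external_task_ids=['publish'],
    allowed_states=['success'],
)
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```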
@@ -1889,196 +1991,94 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
     ...

 @typing.overload
-def
+def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully
-
-    Additionally, you can specify project aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
+    Specifies the PyPI packages for all steps of the flow.

+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.

     Parameters
     ----------
-
-
-
-
-
-
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...

 @typing.overload
-def
+def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-    Specifies the
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully
-
-    Additionally, you can specify project aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
+    Specifies the PyPI packages for all steps of the flow.

+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.

     Parameters
     ----------
-
-
-
-
-
-
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...

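The `@pypi_base`/`@pypi` split documented above (flow-wide packages plus step-specific overrides) looks roughly like this in practice; the package names and versions are illustrative:

```python
from metaflow import FlowSpec, pypi, pypi_base, step


@pypi_base(packages={'pandas': '2.2.2'}, python='3.11.5')
class PyPIFlow(FlowSpec):
    @step
    def start(self):
        import pandas as pd  # resolved from the flow-level package set
        print(pd.__version__)
        self.next(self.train)

    # Step-level @pypi adds scikit-learn on top of the flow-wide set.
    @pypi(packages={'scikit-learn': '1.5.0'})
    @step
    def train(self):
        import sklearn
        print(sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass
```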
 @typing.overload
-def
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
+    Specifies the times when the flow should be run when running on a
+    production scheduler.


     Parameters
     ----------
-
-
-
-
-
-
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accept timezones in [IANA format](https://nodatime.org/TimeZones).
     """
     ...

 @typing.overload
-def
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
     """
-    Specifies the
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
+    Specifies the times when the flow should be run when running on a
+    production scheduler.


     Parameters
     ----------
-
-
-
-
-
-
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accept timezones in [IANA format](https://nodatime.org/TimeZones).
     """
     ...

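Finally, a sketch of `@schedule` as documented above; the cron expression and timezone are illustrative (cron syntax depends on the target scheduler, and `timezone` is honored only on Argo Workflows per the docstring):

```python
from metaflow import FlowSpec, schedule, step


# Runs daily at 02:30 in the given timezone once deployed to a
# production scheduler.
@schedule(cron='30 2 * * *', timezone='Etc/UTC')
class NightlyFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```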