ob-metaflow-stubs 6.0.10.2rc0__py2.py3-none-any.whl → 6.0.10.4__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of ob-metaflow-stubs might be problematic.
- metaflow-stubs/__init__.pyi +777 -731
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +6 -6
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/meta_files.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +3 -3
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +92 -48
- metaflow-stubs/metaflow_git.pyi +2 -2
- metaflow-stubs/mf_extensions/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +5 -5
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +10 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +6 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +5 -5
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +129 -14
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +4 -4
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +4 -4
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -11
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +4 -4
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +4 -4
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +5 -5
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +4 -4
- metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -3
- metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/ob_internal.pyi +2 -3
- metaflow-stubs/packaging_sys/__init__.pyi +7 -7
- metaflow-stubs/packaging_sys/backend.pyi +4 -4
- metaflow-stubs/packaging_sys/distribution_support.pyi +5 -5
- metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
- metaflow-stubs/packaging_sys/utils.pyi +2 -2
- metaflow-stubs/packaging_sys/v1.pyi +3 -3
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +12 -12
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +3 -3
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
- metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
- metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/perimeters.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
- metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
- metaflow-stubs/plugins/secrets/utils.pyi +2 -2
- metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
- metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
- metaflow-stubs/profilers/__init__.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +6 -6
- metaflow-stubs/runner/deployer_impl.pyi +2 -2
- metaflow-stubs/runner/metaflow_runner.pyi +4 -4
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +3 -3
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +2 -2
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_options.pyi +4 -4
- metaflow-stubs/user_configs/config_parameters.pyi +7 -7
- metaflow-stubs/user_decorators/__init__.pyi +2 -2
- metaflow-stubs/user_decorators/common.pyi +2 -2
- metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
- metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
- metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
- metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
- {ob_metaflow_stubs-6.0.10.2rc0.dist-info → ob_metaflow_stubs-6.0.10.4.dist-info}/METADATA +1 -1
- ob_metaflow_stubs-6.0.10.4.dist-info/RECORD +262 -0
- ob_metaflow_stubs-6.0.10.2rc0.dist-info/RECORD +0 -262
- {ob_metaflow_stubs-6.0.10.2rc0.dist-info → ob_metaflow_stubs-6.0.10.4.dist-info}/WHEEL +0 -0
- {ob_metaflow_stubs-6.0.10.2rc0.dist-info → ob_metaflow_stubs-6.0.10.4.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.18.
-# Generated on 2025-09-
+# MF version: 2.18.5.1+obcheckpoint(0.2.6);ob(v1) #
+# Generated on 2025-09-16T23:23:08.891416 #
 ######################################################################################################

 from __future__ import annotations
@@ -39,8 +39,8 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
 from .user_decorators.user_step_decorator import StepMutator as StepMutator
 from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
 from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
-from . import tuple_util as tuple_util
 from . import cards as cards
+from . import tuple_util as tuple_util
 from . import metaflow_git as metaflow_git
 from . import events as events
 from . import runner as runner
@@ -48,9 +48,9 @@ from . import plugins as plugins
 from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
+from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
 from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
-from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from . import client as client
 from .client.core import namespace as namespace
 from .client.core import get_namespace as get_namespace
@@ -83,7 +83,6 @@ from .mf_extensions.outerbounds.plugins.checkpoint_datastores.nebius import nebi
 from .mf_extensions.outerbounds.plugins.checkpoint_datastores.coreweave import coreweave_checkpoints as coreweave_checkpoints
 from .mf_extensions.outerbounds.plugins.aws.assume_role_decorator import assume_role as assume_role
 from .mf_extensions.outerbounds.plugins.apps.core.deployer import AppDeployer as AppDeployer
-from .mf_extensions.outerbounds.plugins.apps.core.deployer import DeployedApp as DeployedApp
 from . import system as system
 from . import cli_components as cli_components
 from . import pylint_wrapper as pylint_wrapper
@@ -169,65 +168,23 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
This hunk swaps the decorator stubs emitted at this position. Removed: the `@environment` overloads (docstring: "Specifies environment variables to be set prior to the execution of a step.", parameter `vars : Dict[str, str], default {}` — "Dictionary of environment variables to set.") and the `nvidia(*, gpu: int, gpu_type: str, queue_timeout: int)` stub ("Specifies that this step should execute on DGX cloud.", with `gpu` — number of GPUs, `gpu_type` — type of Nvidia GPU, and `queue_timeout` — "Time to keep the job in NVCF's queue."). Added in their place: three `coreweave_s3_proxy` overloads, each documented as "CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service. It exists to make it easier for users to know that this decorator should only be used with a Neo Cloud like CoreWeave."

@@ -248,133 +205,136 @@ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepF
Removed from this position: the `s3_proxy` stub (docstring parameters `integration_name`, `write_mode` with the "origin-and-cache" / "origin" / "cache" options, and `debug`) and a timeout-style stub whose recoverable docstring fragments read "This decorator is useful if this step may hang indefinitely." and "hours : int, default 0 — Number of hours to wait prior to timing out." Added in their place:
- `@parallel` overloads — "Decorator prototype for all step decorators. This function gets specialized and imported for all decorators types by _import_plugin_decorators()."
- `@retry` overloads with `times: int = 3` and `minutes_between_retries: int = 2` — "Specifies the number of times the task corresponding to a step needs to be retried. This decorator is useful for handling transient errors, such as networking issues. If your task contains operations that can't be retried safely, e.g. database updates, it is advisable to annotate it with `@retry(times=0)`. This can be used in conjunction with the `@catch` decorator. The `@catch` decorator will execute a no-op task after all retries have been exhausted, ensuring that the flow execution can continue."
- `@card` overloads with `type: str = 'default'`, `id: typing.Optional[str] = None`, `options: typing.Dict[str, typing.Any] = {}`, and `timeout: int = 45` — "Creates a human-readable report, a Metaflow Card, after this step completes. Note that you may add multiple `@card` decorators in a step with different parameters."
The hunk also updates the `huggingface_hub` stub: its signature gains `cache_scope: typing.Optional[str] = None`, the docstring opener becomes "Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.", and the first usage heading is expanded to "**Usage: creating references to models from the Hugging Face Hub that may be loaded in downstream steps**".

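As a quick orientation for readers of this diff, here is a minimal, illustrative sketch (not part of the stub file) of how the regenerated `@retry` and `@card` stubs described above are normally applied to a Metaflow step; the flow and artifact names are made up:

```python
from metaflow import FlowSpec, card, retry, step


class RetryCardFlow(FlowSpec):
    # Emit a default Metaflow card and retry transient failures,
    # matching the defaults documented in the stubs above.
    @card(type="default", timeout=45)
    @retry(times=3, minutes_between_retries=2)
    @step
    def start(self):
        self.value = 1
        self.next(self.end)

    @step
    def end(self):
        print(self.value)


if __name__ == "__main__":
    RetryCardFlow()
```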
@@ -393,7 +353,23 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
         self.next(self.train)
     ```

-    **Usage: loading models
+    **Usage: explicitly loading models at runtime from the Hugging Face Hub or from cache (from Metaflow's datastore)**
+    ```python
+    @huggingface_hub
+    @step
+    def run_training(self):
+        # Temporary directory (auto-cleaned on exit)
+        with current.huggingface_hub.load(
+            repo_id="google-bert/bert-base-uncased",
+            allow_patterns=["*.bin"],
+        ) as local_path:
+            # Use files under local_path
+            train_model(local_path)
+        ...
+
+    ```
+
+    **Usage: loading models directly from the Hugging Face Hub or from cache (from Metaflow's datastore)**
     ```python
     @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
     @step
@@ -402,7 +378,7 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
     ```

     ```python
-    @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
+    @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
     @step
     def finetune_model(self):
         path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
@@ -433,6 +409,37 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
     temp_dir_root : str, optional
         The root directory that will hold the temporary directory where objects will be downloaded.

+    cache_scope : str, optional
+        The scope of the cache. Can be `checkpoint` / `flow` / `global`.
+
+        - `checkpoint` (default): All repos are stored like objects saved by `@checkpoint`.
+          i.e., the cached path is derived from the namespace, flow, step, and Metaflow foreach iteration.
+          Any repo downloaded under this scope will only be retrieved from the cache when the step runs under the same namespace in the same flow (at the same foreach index).
+
+        - `flow`: All repos are cached under the flow, regardless of namespace.
+          i.e., the cached path is derived solely from the flow name.
+          When to use this mode:
+            - Multiple users are executing the same flow and want shared access to the repos cached by the decorator.
+            - Multiple versions of a flow are deployed, all needing access to the same repos cached by the decorator.
+
+        - `global`: All repos are cached under a globally static path.
+          i.e., the base path of the cache is static and all repos are stored under it.
+          When to use this mode:
+            - All repos from the Hugging Face Hub need to be shared by users across all flow executions.
+
+        Each caching scope comes with its own trade-offs:
+        - `checkpoint`:
+            - Has explicit control over when caches are populated (controlled by the same flow that has the `@huggingface_hub` decorator) but ends up hitting the Hugging Face Hub more often if there are many users/namespaces/steps.
+            - Since objects are written on a `namespace/flow/step` basis, the blast radius of a bad checkpoint is limited to a particular flow in a namespace.
+        - `flow`:
+            - Has less control over when caches are populated (can be written by any execution instance of a flow from any namespace) but results in more cache hits.
+            - The blast radius of a bad checkpoint is limited to all runs of a particular flow.
+            - It doesn't promote cache reuse across flows.
+        - `global`:
+            - Has no control over when caches are populated (can be written by any flow execution) but has the highest cache hit rate.
+            - It promotes cache reuse across flows.
+            - The blast radius of a bad checkpoint spans every flow that could be using a particular repo.
+
     load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
         The list of repos (models/datasets) to load.

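The new `cache_scope` knob documented above composes with the existing `load=` usage shown earlier in this docstring. A minimal sketch (illustrative only; it assumes, as these stubs indicate, that `huggingface_hub` is importable from the top-level `metaflow` package):

```python
from metaflow import FlowSpec, current, huggingface_hub, step


class SharedHubCacheFlow(FlowSpec):
    # Cache the repo under the flow name so every user and namespace running
    # this flow reuses the same cached copy (the `flow` scope described above).
    @huggingface_hub(cache_scope="flow", load=["mistralai/Mistral-7B-Instruct-v0.1"])
    @step
    def start(self):
        path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
        print("model files available at", path_to_model)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SharedHubCacheFlow()
```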
@@ -448,252 +455,171 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
Removed from this position: the duplicate `coreweave_s3_proxy` overloads (now emitted earlier in the file), the `@card` overloads, and the `@model` overloads together with their docstring examples (`current.model.save(...)`, `current.model.loaded["my_model"]`, `current.model.load(...)` and the `temp_dir_root` note). Emitted in their place:
- `def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict)` — "This decorator is used to run vllm APIs as Metaflow task sidecars."; backend `'local'` runs as a separate process on the local task machine; any Hugging Face model identifier is accepted (e.g. `'meta-llama/Llama-3.2-1B'`), and vLLM's OpenAI-compatible server serves one model per server instance, so multiple models require multiple `@vllm` decorators. `card_refresh_interval`, `max_retries`, and `retry_alert_frequency` apply only when `openai_api_server=True`; `engine_args` passes extra keyword arguments to the vLLM engine (for example, `tensor_parallel_size=2`).
- `def secrets(...)` overloads — "Specifies secrets to be retrieved and injected as environment variables prior to the execution of a step.", with `sources : List[Union[str, Dict[str, Any]]], default: []` (secret specs) and `role : str, optional, default: None`.
- `def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None)` — "S3 Proxy decorator for routing S3 requests through a local proxy service.", where `write_mode` accepts "origin-and-cache", "origin", or "cache".
- `def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict)` — "This decorator is used to run Ollama APIs as Metaflow task sidecars."; valid backends are `'local'` (with `'managed'` and `'remote'` marked TODO), and any model from https://ollama.com/search (e.g. `'llama3.2'`, `'llama3.3'`) can be listed; `cache_update_policy` is "auto", "force", or "never", and `circuit_breaker_config` / `timeout_config` tune failure handling and operation timeouts.
- `def nvct(*, gpu: int, gpu_type: str)` — "Specifies that this step should execute on DGX cloud.", with `gpu` (number of GPUs) and `gpu_type` (type of Nvidia GPU).

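For context on the sidecar decorators listed above, here is a rough sketch of attaching `@ollama` to a step, based only on the docstring in this hunk (the model name is illustrative, and parameters not shown are assumed to fall back to the decorator's defaults):

```python
from metaflow import FlowSpec, ollama, step


class OllamaSidecarFlow(FlowSpec):
    # Run an Ollama sidecar serving llama3.2 next to this task;
    # the 'local' backend runs it as a separate process on the task machine.
    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        # The step body can now talk to the local Ollama API.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaSidecarFlow()
```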
@@ -716,151 +642,24 @@ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
Removed from this position: the `@kubernetes` stub ("Specifies that this step should execute on Kubernetes.") together with its full signature and parameter documentation, covering `cpu`, `memory`, and `disk` sizing (combined with `@resources` by taking the maximum), `image`, `image_pull_policy`, `image_pull_secrets`, `service_account`, `secrets`, `node_selector`, `namespace`, `gpu` and `gpu_vendor`, `tolerations`, `labels`, `annotations`, and the tmpfs options (`use_tmpfs`, `tmpfs_tempdir`, `tmpfs_size`, `tmpfs_path`), plus `persistent_volume_claims`, `shared_memory`, `port`, `compute_pool`, `hostname_resolution_timeout`, `qos`, and `security_context`. The rendered listing ends partway through this hunk.
|
-
The value for the size (in MiB) of the tmpfs mount for this step.
|
|
778
|
-
This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
|
|
779
|
-
memory allocated for this step.
|
|
780
|
-
tmpfs_path : str, optional, default /metaflow_temp
|
|
781
|
-
Path to tmpfs mount for this step.
|
|
782
|
-
persistent_volume_claims : Dict[str, str], optional, default None
|
|
783
|
-
A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
|
|
784
|
-
volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
|
|
785
|
-
shared_memory: int, optional
|
|
786
|
-
Shared memory size (in MiB) required for this step
|
|
787
|
-
port: int, optional
|
|
788
|
-
Port number to specify in the Kubernetes job object
|
|
789
|
-
compute_pool : str, optional, default None
|
|
790
|
-
Compute pool to be used for for this step.
|
|
791
|
-
If not specified, any accessible compute pool within the perimeter is used.
|
|
792
|
-
hostname_resolution_timeout: int, default 10 * 60
|
|
793
|
-
Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
|
|
794
|
-
Only applicable when @parallel is used.
|
|
795
|
-
qos: str, default: Burstable
|
|
796
|
-
Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
|
|
797
|
-
|
|
798
|
-
security_context: Dict[str, Any], optional, default None
|
|
799
|
-
Container security context. Applies to the task container. Allows the following keys:
|
|
800
|
-
- privileged: bool, optional, default None
|
|
801
|
-
- allow_privilege_escalation: bool, optional, default None
|
|
802
|
-
- run_as_user: int, optional, default None
|
|
803
|
-
- run_as_group: int, optional, default None
|
|
804
|
-
- run_as_non_root: bool, optional, default None
|
|
805
|
-
"""
|
|
806
|
-
...
|
|
807
|
-
|
|
808
645
|
@typing.overload
|
|
809
|
-
def
|
|
646
|
+
def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
810
647
|
"""
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
attributes set in the `@conda_base` flow-level decorator. Hence,
|
|
815
|
-
you can use `@conda_base` to set packages required by all
|
|
816
|
-
steps and use `@conda` to specify step-specific overrides.
|
|
817
|
-
|
|
818
|
-
|
|
819
|
-
Parameters
|
|
820
|
-
----------
|
|
821
|
-
packages : Dict[str, str], default {}
|
|
822
|
-
Packages to use for this step. The key is the name of the package
|
|
823
|
-
and the value is the version to use.
|
|
824
|
-
libraries : Dict[str, str], default {}
|
|
825
|
-
Supported for backward compatibility. When used with packages, packages will take precedence.
|
|
826
|
-
python : str, optional, default None
|
|
827
|
-
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
|
828
|
-
that the version used will correspond to the version of the Python interpreter used to start the run.
|
|
829
|
-
disabled : bool, default False
|
|
830
|
-
If set to True, disables @conda.
|
|
648
|
+
Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
|
|
649
|
+
It exists to make it easier for users to know that this decorator should only be used with
|
|
650
|
+
a Neo Cloud like Nebius.
|
|
831
651
|
"""
|
|
832
652
|
...
|
|
833
653
|
|
|
834
654
|
@typing.overload
|
|
835
|
-
def
|
|
836
|
-
...
|
|
837
|
-
|
|
838
|
-
@typing.overload
|
|
839
|
-
def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
655
|
+
def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
840
656
|
...
|
|
841
657
|
|
|
842
|
-
def
|
|
658
|
+
def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
|
|
843
659
|
"""
|
|
844
|
-
|
|
845
|
-
|
|
846
|
-
|
|
847
|
-
attributes set in the `@conda_base` flow-level decorator. Hence,
|
|
848
|
-
you can use `@conda_base` to set packages required by all
|
|
849
|
-
steps and use `@conda` to specify step-specific overrides.
|
|
850
|
-
|
|
851
|
-
|
|
852
|
-
Parameters
|
|
853
|
-
----------
|
|
854
|
-
packages : Dict[str, str], default {}
|
|
855
|
-
Packages to use for this step. The key is the name of the package
|
|
856
|
-
and the value is the version to use.
|
|
857
|
-
libraries : Dict[str, str], default {}
|
|
858
|
-
Supported for backward compatibility. When used with packages, packages will take precedence.
|
|
859
|
-
python : str, optional, default None
|
|
860
|
-
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
|
861
|
-
that the version used will correspond to the version of the Python interpreter used to start the run.
|
|
862
|
-
disabled : bool, default False
|
|
863
|
-
If set to True, disables @conda.
|
|
660
|
+
Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
|
|
661
|
+
It exists to make it easier for users to know that this decorator should only be used with
|
|
662
|
+
a Neo Cloud like Nebius.
|
|
864
663
|
"""
|
|
865
664
|
...
|
|
866
665
|
|
|
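This hunk moves the step-level `@kubernetes` and `@conda` stubs elsewhere in the file and introduces the `@nebius_s3_proxy` overloads. A minimal sketch of attaching the new decorator to a step, assuming the top-level import path and that the step also runs on Kubernetes; flow and resource values are illustrative:

```python
from metaflow import FlowSpec, step, kubernetes, nebius_s3_proxy  # nebius_s3_proxy import path assumed

class NebiusProxyFlow(FlowSpec):

    # Routes the step's S3 traffic through the local proxy service; per the
    # docstring above this is only meant for a Neo Cloud such as Nebius.
    @nebius_s3_proxy
    @kubernetes(cpu=2, memory=8192)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NebiusProxyFlow()
```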
@@ -915,25 +714,6 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     """
     ...

-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
 @typing.overload
 def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -1076,51 +856,67 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
     With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
     created within the task will be loaded when the task is retries execution on failure.

-    temp_dir_root : str, default: None
-        The root directory under which `current.checkpoint.directory` will be created.
+    temp_dir_root : str, default: None
+        The root directory under which `current.checkpoint.directory` will be created.
+    """
+    ...
+
+@typing.overload
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies a timeout for your step.
+
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+    Parameters
+    ----------
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
     """
     ...

-
+@typing.overload
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
     """
-
+    Specifies a timeout for your step.

-
-    --------------
-    @ollama(
-        models=[...],
-        ...
-    )
+    This decorator is useful if this step may hang indefinitely.

-
-
-
-    - (TODO) 'managed': Outerbounds hosts and selects compute provider.
-    - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.

-
-
-    Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


     Parameters
     ----------
-
-
-
-
-
-
-    cache_update_policy: str
-        Cache update policy: "auto", "force", or "never".
-    force_cache_update: bool
-        Simple override for "force" cache update policy.
-    debug: bool
-        Whether to turn on verbose debugging logs.
-    circuit_breaker_config: dict
-        Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
-    timeout_config: dict
-        Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
     """
     ...
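The new `@timeout` stub above notes that all time units are summed into one effective timeout. A minimal sketch of combining it with `@retry` and `@catch` as the docstring suggests; the flow and step names are illustrative:

```python
from metaflow import FlowSpec, step, timeout, retry, catch

class TimeoutDemoFlow(FlowSpec):

    # Effective timeout is 1 hour + 60 seconds; on timeout the step is retried
    # up to twice, and the final failure is captured by @catch in self.failure.
    @catch(var="failure")
    @retry(times=2)
    @timeout(hours=1, seconds=60)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TimeoutDemoFlow()
```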
@@ -1143,27 +939,6 @@ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag]
     """
     ...

-@typing.overload
-def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
-    It exists to make it easier for users to know that this decorator should only be used with
-    a Neo Cloud like Nebius.
-    """
-    ...
-
-@typing.overload
-def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
-    It exists to make it easier for users to know that this decorator should only be used with
-    a Neo Cloud like Nebius.
-    """
-    ...
-
 @typing.overload
 def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -1294,99 +1069,329 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     """
     ...

+def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on DGX cloud.
+
+
+    Parameters
+    ----------
+    gpu : int
+        Number of GPUs to use.
+    gpu_type : str
+        Type of Nvidia GPU to use.
+    queue_timeout : int
+        Time to keep the job in NVCF's queue.
+    """
+    ...
+
 @typing.overload
-def
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-    to a step needs to be retried.
+    Specifies environment variables to be set prior to the execution of a step.

-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.

-
-
-
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+    """
+    Specifies environment variables to be set prior to the execution of a step.
+
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
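The new `@environment` stub above injects variables before the step body runs. A minimal sketch, not part of the diff; the variable names are illustrative:

```python
import os
from metaflow import FlowSpec, step, environment

class EnvDemoFlow(FlowSpec):

    # The vars dict is applied to the step's process environment before execution.
    @environment(vars={"TOKENIZERS_PARALLELISM": "false", "MY_FLAG": "1"})
    @step
    def start(self):
        print(os.environ["MY_FLAG"])  # prints "1" inside the task
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvDemoFlow()
```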
+@typing.overload
+def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Enables loading / saving of models within a step.
+
+    > Examples
+    - Saving Models
+    ```python
+    @model
+    @step
+    def train(self):
+        # current.model.save returns a dictionary reference to the model saved
+        self.my_model = current.model.save(
+            path_to_my_model,
+            label="my_model",
+            metadata={
+                "epochs": 10,
+                "batch-size": 32,
+                "learning-rate": 0.001,
+            }
+        )
+        self.next(self.test)
+
+    @model(load="my_model")
+    @step
+    def test(self):
+        # `current.model.loaded` returns a dictionary of the loaded models
+        # where the key is the name of the artifact and the value is the path to the model
+        print(os.listdir(current.model.loaded["my_model"]))
+        self.next(self.end)
+    ```
+
+    - Loading models
+    ```python
+    @step
+    def train(self):
+        # current.model.load returns the path to the model loaded
+        checkpoint_path = current.model.load(
+            self.checkpoint_key,
+        )
+        model_path = current.model.load(
+            self.model,
+        )
+        self.next(self.test)
+    ```
+
+
+    Parameters
+    ----------
+    load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+        Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+        These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+        If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+        the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+        If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+    temp_dir_root : str, default: None
+        The root directory under which `current.model.loaded` will store loaded models
+    """
+    ...
+
+@typing.overload
+def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
+    """
+    Enables loading / saving of models within a step.
+
+    > Examples
+    - Saving Models
+    ```python
+    @model
+    @step
+    def train(self):
+        # current.model.save returns a dictionary reference to the model saved
+        self.my_model = current.model.save(
+            path_to_my_model,
+            label="my_model",
+            metadata={
+                "epochs": 10,
+                "batch-size": 32,
+                "learning-rate": 0.001,
+            }
+        )
+        self.next(self.test)
+
+    @model(load="my_model")
+    @step
+    def test(self):
+        # `current.model.loaded` returns a dictionary of the loaded models
+        # where the key is the name of the artifact and the value is the path to the model
+        print(os.listdir(current.model.loaded["my_model"]))
+        self.next(self.end)
+    ```
+
+    - Loading models
+    ```python
+    @step
+    def train(self):
+        # current.model.load returns the path to the model loaded
+        checkpoint_path = current.model.load(
+            self.checkpoint_key,
+        )
+        model_path = current.model.load(
+            self.model,
+        )
+        self.next(self.test)
+    ```


     Parameters
     ----------
-
-
-
-
+    load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+        Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+        These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+        If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+        the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+        If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+    temp_dir_root : str, default: None
+        The root directory under which `current.model.loaded` will store loaded models
     """
     ...

-
-def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    Specifies that this step should execute on Kubernetes.


     Parameters
     ----------
-
-    Number of
-
-
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    disk : int, default 10240
+        Disk size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on Kubernetes. If not specified, and
+        METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+        If given, the imagePullPolicy to be applied to the Docker image of the step.
+    image_pull_secrets: List[str], default []
+        The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+        Kubernetes image pull secrets to use when pulling container images
+        in Kubernetes.
+    service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+        Kubernetes service account to use when launching pod in Kubernetes.
+    secrets : List[str], optional, default None
+        Kubernetes secrets to use when launching pod in Kubernetes. These
+        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+        in Metaflow configuration.
+    node_selector: Union[Dict[str,str], str], optional, default None
+        Kubernetes node selector(s) to apply to the pod running the task.
+        Can be passed in as a comma separated string of values e.g.
+        'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+        {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+        Kubernetes namespace to use when launching pod in Kubernetes.
+    gpu : int, optional, default None
+        Number of GPUs required for this step. A value of zero implies that
+        the scheduled node should not have GPUs.
+    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+        The vendor of the GPUs to be used for this step.
+    tolerations : List[Dict[str,str]], default []
+        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+        Kubernetes tolerations to use when launching pod in Kubernetes.
+    labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+        Kubernetes labels to use when launching pod in Kubernetes.
+    annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+        Kubernetes annotations to use when launching pod in Kubernetes.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step.
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default: None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default /metaflow_temp
+        Path to tmpfs mount for this step.
+    persistent_volume_claims : Dict[str, str], optional, default None
+        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+    shared_memory: int, optional
+        Shared memory size (in MiB) required for this step
+    port: int, optional
+        Port number to specify in the Kubernetes job object
+    compute_pool : str, optional, default None
+        Compute pool to be used for for this step.
+        If not specified, any accessible compute pool within the perimeter is used.
+    hostname_resolution_timeout: int, default 10 * 60
+        Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+        Only applicable when @parallel is used.
+    qos: str, default: Burstable
+        Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+    security_context: Dict[str, Any], optional, default None
+        Container security context. Applies to the task container. Allows the following keys:
+        - privileged: bool, optional, default None
+        - allow_privilege_escalation: bool, optional, default None
+        - run_as_user: int, optional, default None
+        - run_as_group: int, optional, default None
+        - run_as_non_root: bool, optional, default None
     """
     ...

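The relocated `@kubernetes` stub above notes that when `@resources` is also present, the maximum of each value is used. A minimal sketch of that interaction, not part of the diff; the image and sizes are illustrative:

```python
from metaflow import FlowSpec, step, kubernetes, resources

class K8sDemoFlow(FlowSpec):

    # With both decorators present the step requests max(8, 2) = 8 CPUs and
    # max(16384, 4096) = 16384 MB of memory on the cluster.
    @resources(cpu=8, memory=16384)
    @kubernetes(cpu=2, memory=4096, image="python:3.11", use_tmpfs=True, tmpfs_size=2048)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sDemoFlow()
```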
 @typing.overload
-def
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.

-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.

     Parameters
     ----------
-    packages : Dict[str, str], default
-        Packages to use for this
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
         and the value is the version to use.
-
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
         Version of Python to use, e.g. '3.7.4'. A default value of None implies
         that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...

 @typing.overload
-def
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

-
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies the
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.

-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.

     Parameters
     ----------
-    packages : Dict[str, str], default
-        Packages to use for this
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
         and the value is the version to use.
-
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
         Version of Python to use, e.g. '3.7.4'. A default value of None implies
         that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...
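The `@conda` docstring above describes how step-level settings augment `@conda_base`. A minimal sketch of that split, not part of the diff; the package versions are illustrative:

```python
from metaflow import FlowSpec, step, conda, conda_base

# Flow-level baseline environment shared by all steps.
@conda_base(python="3.11.5", packages={"pandas": "2.2.2"})
class CondaDemoFlow(FlowSpec):

    # Step-specific override: adds scikit-learn on top of the flow-level packages.
    @conda(packages={"scikit-learn": "1.5.0"})
    @step
    def start(self):
        import pandas, sklearn  # both resolved inside the step's Conda environment
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaDemoFlow()
```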
@@ -1469,167 +1474,66 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
     ```
     @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
     ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
-
-
-    Parameters
-    ----------
-    event : Union[str, Dict[str, Any]], optional, default None
-        Event dependency for this flow.
-    events : List[Union[str, Dict[str, Any]]], default []
-        Events dependency for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
-    ...
-
-@typing.overload
-def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
-    """
-    Specifies the event(s) that this flow depends on.
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
-
-
-    Parameters
-    ----------
-    event : Union[str, Dict[str, Any]], optional, default None
-        Event dependency for this flow.
-    events : List[Union[str, Dict[str, Any]]], default []
-        Events dependency for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
-    ...
-
-@typing.overload
-def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the flow(s) that this flow depends on.
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully
-
-    Additionally, you can specify project aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
+    This is equivalent to:
     ```
-    @
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
     ```

-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
-

     Parameters
     ----------
-
-
-
-
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
     options : Dict[str, Any], default {}
         Backend-specific configuration for tuning eventing behavior.
     """
     ...

 @typing.overload
-def
+def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the
+    Specifies the event(s) that this flow depends on.

     ```
-    @
+    @trigger(event='foo')
     ```
     or
     ```
-    @
+    @trigger(events=['foo', 'bar'])
     ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully

-    Additionally, you can specify
-
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
     ```
-    @
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
     ```
     or
     ```
-    @
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
     ```

-
-    inferred from the current project or project branch):
+    'parameters' can also be a list of strings and tuples like so:
     ```
-    @
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
     ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`


     Parameters
     ----------
-
-
-
-
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
     options : Dict[str, Any], default {}
         Backend-specific configuration for tuning eventing behavior.
     """
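The updated `@trigger` docstring above shows how event payload fields map onto flow parameters. A minimal sketch of the list-of-strings-and-tuples form, not part of the diff; the event and parameter names are illustrative:

```python
from metaflow import FlowSpec, Parameter, step, trigger

# 'common_name' maps to the event field of the same name; 'flow_param' maps to
# the event field 'event_field'. The flow starts when event 'foo' is published.
@trigger(event={"name": "foo", "parameters": ["common_name", ("flow_param", "event_field")]})
class TriggeredFlow(FlowSpec):
    common_name = Parameter("common_name", default="")
    flow_param = Parameter("flow_param", default="")

    @step
    def start(self):
        print(self.common_name, self.flow_param)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TriggeredFlow()
```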
@@ -1678,6 +1582,41 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
     """
     ...

+def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies what flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
+
+
+    Parameters
+    ----------
+    name : str
+        Project name. Make sure that the name is unique amongst all
+        projects that use the same production scheduler. The name may
+        contain only lowercase alphanumeric characters and underscores.
+
+    branch : Optional[str], default None
+        The branch to use. If not specified, the branch is set to
+        `user.<username>` unless `production` is set to `True`. This can
+        also be set on the command line using `--branch` as a top-level option.
+        It is an error to specify `branch` in the decorator and on the command line.
+
+    production : bool, default False
+        Whether or not the branch is the production branch. This can also be set on the
+        command line using `--production` as a top-level option. It is an error to specify
+        `production` in the decorator and on the command line.
+        The project branch name will be:
+        - if `branch` is specified:
+            - if `production` is True: `prod.<branch>`
+            - if `production` is False: `test.<branch>`
+        - if `branch` is not specified:
+            - if `production` is True: `prod`
+            - if `production` is False: `user.<username>`
+    """
+    ...
+
 @typing.overload
 def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
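The relocated `@project` stub above describes how the effective branch name is derived. A minimal sketch, not part of the diff; the project name is illustrative:

```python
from metaflow import FlowSpec, step, project

# With no explicit branch and production=False (the defaults), the effective
# branch is `user.<username>`; running with `--production` would make it `prod`.
@project(name="demo_project")
class ProjectDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectDemoFlow()
```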
@@ -1843,6 +1782,107 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
     """
     ...

+@typing.overload
+def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the flow(s) that this flow depends on.
+
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully
+
+    Additionally, you can specify project aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
+
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
+
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
+
+
+    Parameters
+    ----------
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
+    """
+    ...
+
+@typing.overload
+def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+    """
+    Specifies the flow(s) that this flow depends on.
+
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully
+
+    Additionally, you can specify project aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
+
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
+
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
+
+
+    Parameters
+    ----------
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
+    """
+    ...
+
 def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
     The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
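The relocated `@trigger_on_finish` stub above supports both plain flow names and project-aware dictionaries. A minimal sketch of the dictionary form, not part of the diff; the upstream flow, project, and branch names are illustrative:

```python
from metaflow import FlowSpec, step, trigger_on_finish

# Runs whenever FooFlow on branch 'branch' of 'my_project' finishes successfully;
# a bare string such as 'FooFlow' would instead resolve within the current namespace.
@trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```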
@@ -1886,38 +1926,44 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
     """
     ...

-
+@typing.overload
+def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies
-
-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
+    Specifies the PyPI packages for all steps of the flow.

+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.

     Parameters
     ----------
-
-
-
-
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
+@typing.overload
+def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+    """
+    Specifies the PyPI packages for all steps of the flow.

-
-
-    `user.<username>` unless `production` is set to `True`. This can
-    also be set on the command line using `--branch` as a top-level option.
-    It is an error to specify `branch` in the decorator and on the command line.
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.

-
-
-
-
-
-
-
-
-    - if `branch` is not specified:
-        - if `production` is True: `prod`
-        - if `production` is False: `user.<username>`
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...

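The relocated `@pypi_base` stub above pairs with the step-level `@pypi` decorator in the same way `@conda_base` pairs with `@conda`. A minimal sketch, not part of the diff; the package versions are illustrative:

```python
from metaflow import FlowSpec, step, pypi, pypi_base

# Flow-level baseline: every step gets requests from PyPI on Python 3.11.5.
@pypi_base(python="3.11.5", packages={"requests": "2.32.3"})
class PypiDemoFlow(FlowSpec):

    # Step-level override adds beautifulsoup4 for this step only.
    @pypi(packages={"beautifulsoup4": "4.12.3"})
    @step
    def start(self):
        import requests, bs4  # both available in the step's PyPI environment
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiDemoFlow()
```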