ob-metaflow-stubs 6.0.8.2__py2.py3-none-any.whl → 6.0.9.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of ob-metaflow-stubs has been flagged as potentially problematic.
- metaflow-stubs/__init__.pyi +848 -848
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +5 -5
- metaflow-stubs/client/filecache.pyi +3 -3
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +14 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +3 -3
- metaflow-stubs/meta_files.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +2 -2
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +46 -46
- metaflow-stubs/metaflow_git.pyi +2 -2
- metaflow-stubs/mf_extensions/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -5
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +4 -4
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +3 -3
- metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/ob_internal.pyi +2 -2
- metaflow-stubs/packaging_sys/__init__.pyi +6 -6
- metaflow-stubs/packaging_sys/backend.pyi +4 -4
- metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
- metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
- metaflow-stubs/packaging_sys/utils.pyi +2 -2
- metaflow-stubs/packaging_sys/v1.pyi +3 -3
- metaflow-stubs/parameters.pyi +3 -3
- metaflow-stubs/plugins/__init__.pyi +12 -12
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +8 -2
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
- metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +2 -2
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
- metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
- metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/perimeters.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
- metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
- metaflow-stubs/plugins/secrets/utils.pyi +2 -2
- metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
- metaflow-stubs/profilers/__init__.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +5 -5
- metaflow-stubs/runner/deployer_impl.pyi +2 -2
- metaflow-stubs/runner/metaflow_runner.pyi +3 -3
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +4 -4
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +3 -3
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_options.pyi +2 -2
- metaflow-stubs/user_configs/config_parameters.pyi +8 -6
- metaflow-stubs/user_decorators/__init__.pyi +2 -2
- metaflow-stubs/user_decorators/common.pyi +2 -2
- metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
- metaflow-stubs/user_decorators/mutable_step.pyi +6 -6
- metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
- metaflow-stubs/user_decorators/user_step_decorator.pyi +20 -7
- {ob_metaflow_stubs-6.0.8.2.dist-info → ob_metaflow_stubs-6.0.9.0.dist-info}/METADATA +1 -1
- ob_metaflow_stubs-6.0.9.0.dist-info/RECORD +262 -0
- ob_metaflow_stubs-6.0.8.2.dist-info/RECORD +0 -262
- {ob_metaflow_stubs-6.0.8.2.dist-info → ob_metaflow_stubs-6.0.9.0.dist-info}/WHEEL +0 -0
- {ob_metaflow_stubs-6.0.8.2.dist-info → ob_metaflow_stubs-6.0.9.0.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.
-# Generated on 2025-08-
+# MF version: 2.18.0.1+obcheckpoint(0.2.4);ob(v1) #
+# Generated on 2025-08-27T22:09:03.717615 #
 ######################################################################################################

 from __future__ import annotations
@@ -39,18 +39,18 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
 from .user_decorators.user_step_decorator import StepMutator as StepMutator
 from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
 from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
+from . import metaflow_git as metaflow_git
 from . import cards as cards
 from . import tuple_util as tuple_util
-from . import metaflow_git as metaflow_git
 from . import events as events
 from . import runner as runner
 from . import plugins as plugins
 from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
-from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
 from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
 from . import client as client
 from .client.core import namespace as namespace
 from .client.core import get_namespace as get_namespace
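Note on the hunk above: the two `pypi` parser imports merely swap positions; the parsers themselves are unchanged. They are plain functions that take a file's text content and return a config dict, which is how Metaflow's `Config(..., parser=...)` consumes them. A minimal sketch of calling one directly; the requirements content and the exact keys of the returned dict are illustrative assumptions, not something this diff specifies:

```python
# Hypothetical usage sketch of the re-exported parser; the returned dict's
# exact shape is an assumption based on the parser's documented purpose.
from metaflow.plugins.pypi.parsers import requirements_txt_parser

requirements = "numpy==1.26.4\npandas==2.2.2\n"
parsed = requirements_txt_parser(requirements)
print(parsed)  # expected to contain a "packages" mapping of name -> version
```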
@@ -167,6 +167,165 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     """
     ...

+@typing.overload
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the PyPI packages for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
+@typing.overload
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+    """
+    Specifies the PyPI packages for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on Kubernetes.
+
+
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    disk : int, default 10240
+        Disk size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on Kubernetes. If not specified, and
+        METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+        If given, the imagePullPolicy to be applied to the Docker image of the step.
+    image_pull_secrets: List[str], default []
+        The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+        Kubernetes image pull secrets to use when pulling container images
+        in Kubernetes.
+    service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+        Kubernetes service account to use when launching pod in Kubernetes.
+    secrets : List[str], optional, default None
+        Kubernetes secrets to use when launching pod in Kubernetes. These
+        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+        in Metaflow configuration.
+    node_selector: Union[Dict[str,str], str], optional, default None
+        Kubernetes node selector(s) to apply to the pod running the task.
+        Can be passed in as a comma separated string of values e.g.
+        'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+        {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+        Kubernetes namespace to use when launching pod in Kubernetes.
+    gpu : int, optional, default None
+        Number of GPUs required for this step. A value of zero implies that
+        the scheduled node should not have GPUs.
+    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+        The vendor of the GPUs to be used for this step.
+    tolerations : List[Dict[str,str]], default []
+        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+        Kubernetes tolerations to use when launching pod in Kubernetes.
+    labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+        Kubernetes labels to use when launching pod in Kubernetes.
+    annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+        Kubernetes annotations to use when launching pod in Kubernetes.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step.
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default: None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default /metaflow_temp
+        Path to tmpfs mount for this step.
+    persistent_volume_claims : Dict[str, str], optional, default None
+        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+    shared_memory: int, optional
+        Shared memory size (in MiB) required for this step
+    port: int, optional
+        Port number to specify in the Kubernetes job object
+    compute_pool : str, optional, default None
+        Compute pool to be used for for this step.
+        If not specified, any accessible compute pool within the perimeter is used.
+    hostname_resolution_timeout: int, default 10 * 60
+        Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+        Only applicable when @parallel is used.
+    qos: str, default: Burstable
+        Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+    security_context: Dict[str, Any], optional, default None
+        Container security context. Applies to the task container. Allows the following keys:
+        - privileged: bool, optional, default None
+        - allow_privilege_escalation: bool, optional, default None
+        - run_as_user: int, optional, default None
+        - run_as_group: int, optional, default None
+        - run_as_non_root: bool, optional, default None
+    """
+    ...
+
+@typing.overload
+def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
 def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
     This decorator is used to run vllm APIs as Metaflow task sidecars.
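The hunk above adds `@pypi`, `@kubernetes`, and `@app_deploy` stubs at new positions in the file. For context, a short usage sketch combining the two documented decorators on a step; the package pins and resource sizes are illustrative, not taken from this diff:

```python
from metaflow import FlowSpec, step, pypi, kubernetes

class TrainFlow(FlowSpec):

    @kubernetes(cpu=2, memory=8192, disk=20480)  # sizes in MB, per the docstring above
    @pypi(packages={"scikit-learn": "1.5.0"}, python="3.11.5")
    @step
    def start(self):
        # scikit-learn resolves inside the @pypi-managed environment
        import sklearn
        self.sklearn_version = sklearn.__version__
        self.next(self.end)

    @step
    def end(self):
        print("trained with scikit-learn", self.sklearn_version)

if __name__ == "__main__":
    TrainFlow()
```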
@@ -217,6 +376,25 @@ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card
     """
     ...

+@typing.overload
+def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    """
+    A simple decorator that demonstrates using CardDecoratorInjector
+    to inject a card and render simple markdown content.
+    """
+    ...
+
+@typing.overload
+def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    A simple decorator that demonstrates using CardDecoratorInjector
+    to inject a card and render simple markdown content.
+    """
+    ...
+
 def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
     Decorator that helps cache, version and store models/datasets from huggingface hub.
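The `@huggingface_hub` stub shown as context here caches and versions Hugging Face repos in the datastore. A minimal usage sketch, assuming the loaded path is exposed as `current.huggingface_hub.loaded[<repo_id>]` as in the decorator's upstream docs; the repo id is illustrative:

```python
from metaflow import FlowSpec, step, huggingface_hub, current

class PullModelFlow(FlowSpec):

    # repo id below is illustrative; @huggingface_hub caches it in the datastore
    @huggingface_hub(load=["bert-base-uncased"])
    @step
    def start(self):
        # assumed accessor: maps repo id -> local directory of the downloaded snapshot
        local_path = current.huggingface_hub.loaded["bert-base-uncased"]
        print("model files at", local_path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PullModelFlow()
```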
@@ -297,42 +475,6 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
     """
     ...

-@typing.overload
-def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Internal decorator to support Fast bakery
-    """
-    ...
-
-@typing.overload
-def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Internal decorator to support Fast bakery
-    """
-    ...
-
 @typing.overload
 def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -366,213 +508,150 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
     """
     ...

-def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on DGX cloud.
-
-
-    Parameters
-    ----------
-    gpu : int
-        Number of GPUs to use.
-    gpu_type : str
-        Type of Nvidia GPU to use.
-    queue_timeout : int
-        Time to keep the job in NVCF's queue.
-    """
-    ...
-
 @typing.overload
-def
+def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Enables checkpointing for a step.

-
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    > Examples

+    - Saving Checkpoints

-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
-@typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
-    """
-    Specifies the PyPI packages for the step.
-
-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
-@typing.overload
-def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Enables loading / saving of models within a step.
-
-    > Examples
-    - Saving Models
     ```python
-    @
+    @checkpoint
     @step
     def train(self):
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        print(os.listdir(current.model.loaded["my_model"]))
-        self.next(self.end)
+        model = create_model(self.parameters, checkpoint_path = None)
+        for i in range(self.epochs):
+            # some training logic
+            loss = model.train(self.dataset)
+            if i % 10 == 0:
+                model.save(
+                    current.checkpoint.directory,
+                )
+                # saves the contents of the `current.checkpoint.directory` as a checkpoint
+                # and returns a reference dictionary to the checkpoint saved in the datastore
+                self.latest_checkpoint = current.checkpoint.save(
+                    name="epoch_checkpoint",
+                    metadata={
+                        "epoch": i,
+                        "loss": loss,
+                    }
+                )
     ```

-    -
+    - Using Loaded Checkpoints
+
     ```python
+    @retry(times=3)
+    @checkpoint
     @step
     def train(self):
-        #
-
-
-
-
-
-
-
+        # Assume that the task has restarted and the previous attempt of the task
+        # saved a checkpoint
+        checkpoint_path = None
+        if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
+            print("Loaded checkpoint from the previous attempt")
+            checkpoint_path = current.checkpoint.directory
+
+        model = create_model(self.parameters, checkpoint_path = checkpoint_path)
+        for i in range(self.epochs):
+            ...
     ```


     Parameters
     ----------
-
-
-
-
-
-
+    load_policy : str, default: "fresh"
+        The policy for loading the checkpoint. The following policies are supported:
+        - "eager": Loads the the latest available checkpoint within the namespace.
+            With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+            will be loaded at the start of the task.
+        - "none": Do not load any checkpoint
+        - "fresh": Loads the lastest checkpoint created within the running Task.
+            This mode helps loading checkpoints across various retry attempts of the same task.
+            With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+            created within the task will be loaded when the task is retries execution on failure.

     temp_dir_root : str, default: None
-        The root directory under which `current.
+        The root directory under which `current.checkpoint.directory` will be created.
     """
     ...

 @typing.overload
-def
+def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
     """
-    Enables
+    Enables checkpointing for a step.

     > Examples
-
+
+    - Saving Checkpoints
+
     ```python
-    @
+    @checkpoint
     @step
     def train(self):
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        print(os.listdir(current.model.loaded["my_model"]))
-        self.next(self.end)
+        model = create_model(self.parameters, checkpoint_path = None)
+        for i in range(self.epochs):
+            # some training logic
+            loss = model.train(self.dataset)
+            if i % 10 == 0:
+                model.save(
+                    current.checkpoint.directory,
+                )
+                # saves the contents of the `current.checkpoint.directory` as a checkpoint
+                # and returns a reference dictionary to the checkpoint saved in the datastore
+                self.latest_checkpoint = current.checkpoint.save(
+                    name="epoch_checkpoint",
+                    metadata={
+                        "epoch": i,
+                        "loss": loss,
+                    }
+                )
     ```

-    -
+    - Using Loaded Checkpoints
+
     ```python
+    @retry(times=3)
+    @checkpoint
     @step
     def train(self):
-        #
-
-
-
-
-
-
-
+        # Assume that the task has restarted and the previous attempt of the task
+        # saved a checkpoint
+        checkpoint_path = None
+        if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
+            print("Loaded checkpoint from the previous attempt")
+            checkpoint_path = current.checkpoint.directory
+
+        model = create_model(self.parameters, checkpoint_path = checkpoint_path)
+        for i in range(self.epochs):
+            ...
     ```


     Parameters
     ----------
-
-
-
-
-
-
+    load_policy : str, default: "fresh"
+        The policy for loading the checkpoint. The following policies are supported:
+        - "eager": Loads the the latest available checkpoint within the namespace.
+            With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+            will be loaded at the start of the task.
+        - "none": Do not load any checkpoint
+        - "fresh": Loads the lastest checkpoint created within the running Task.
+            This mode helps loading checkpoints across various retry attempts of the same task.
+            With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+            created within the task will be loaded when the task is retries execution on failure.

     temp_dir_root : str, default: None
-        The root directory under which `current.
-    """
-    ...
-
-def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on DGX cloud.
-
-
-    Parameters
-    ----------
-    gpu : int
-        Number of GPUs to use.
-    gpu_type : str
-        Type of Nvidia GPU to use.
+        The root directory under which `current.checkpoint.directory` will be created.
     """
     ...

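As the `@checkpoint` docstring above notes, `current.checkpoint.save(...)` returns a reference dictionary that the example stores as `self.latest_checkpoint`. Because that reference is a plain artifact, a downstream step can inspect it; continuing the docstring's own fragment style, a sketch (only the dict-like nature of the reference comes from the docstring; its keys are not specified in this diff):

```python
@step
def end(self):
    # self.latest_checkpoint was set by the @checkpoint-decorated train step
    if getattr(self, "latest_checkpoint", None):
        print("checkpoint reference:", self.latest_checkpoint)
```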
@@ -598,113 +677,54 @@ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typin
     """
     ...

-
+@typing.overload
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.


     Parameters
     ----------
-
-
-
-
-
-
-        used.
-    disk : int, default 10240
-        Disk size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on Kubernetes. If not specified, and
-        METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
-        If given, the imagePullPolicy to be applied to the Docker image of the step.
-    image_pull_secrets: List[str], default []
-        The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
-        Kubernetes image pull secrets to use when pulling container images
-        in Kubernetes.
-    service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
-        Kubernetes service account to use when launching pod in Kubernetes.
-    secrets : List[str], optional, default None
-        Kubernetes secrets to use when launching pod in Kubernetes. These
-        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
-        in Metaflow configuration.
-    node_selector: Union[Dict[str,str], str], optional, default None
-        Kubernetes node selector(s) to apply to the pod running the task.
-        Can be passed in as a comma separated string of values e.g.
-        'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
-        {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
-    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
-        Kubernetes namespace to use when launching pod in Kubernetes.
-    gpu : int, optional, default None
-        Number of GPUs required for this step. A value of zero implies that
-        the scheduled node should not have GPUs.
-    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
-        The vendor of the GPUs to be used for this step.
-    tolerations : List[Dict[str,str]], default []
-        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
-        Kubernetes tolerations to use when launching pod in Kubernetes.
-    labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
-        Kubernetes labels to use when launching pod in Kubernetes.
-    annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
-        Kubernetes annotations to use when launching pod in Kubernetes.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step.
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default: None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default /metaflow_temp
-        Path to tmpfs mount for this step.
-    persistent_volume_claims : Dict[str, str], optional, default None
-        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
-        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
-    shared_memory: int, optional
-        Shared memory size (in MiB) required for this step
-    port: int, optional
-        Port number to specify in the Kubernetes job object
-    compute_pool : str, optional, default None
-        Compute pool to be used for for this step.
-        If not specified, any accessible compute pool within the perimeter is used.
-    hostname_resolution_timeout: int, default 10 * 60
-        Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
-        Only applicable when @parallel is used.
-    qos: str, default: Burstable
-        Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
-
-    security_context: Dict[str, Any], optional, default None
-        Container security context. Applies to the task container. Allows the following keys:
-        - privileged: bool, optional, default None
-        - allow_privilege_escalation: bool, optional, default None
-        - run_as_user: int, optional, default None
-        - run_as_group: int, optional, default None
-        - run_as_non_root: bool, optional, default None
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...

 @typing.overload
-def
-    """
-    Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
-    It exists to make it easier for users to know that this decorator should only be used with
-    a Neo Cloud like Nebius.
-    """
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
     """
-
-
-
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+
+
+    Parameters
+    ----------
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...

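The `@catch` docstring above describes the `var` artifact contract: when the step raises, the exception is stored in `var` and the step's happy-path artifacts are missing. A runnable sketch of that pattern; the flow, artifact names, and the deliberate failure are illustrative:

```python
from metaflow import FlowSpec, step, catch

class RobustFlow(FlowSpec):

    @catch(var="compute_failed", print_exception=True)
    @step
    def start(self):
        # Raises ZeroDivisionError; @catch stores it in self.compute_failed
        # and lets the run continue instead of failing.
        self.result = 1 / 0
        self.next(self.end)

    @step
    def end(self):
        # Per the docstring: when the exception fired, happy-path artifacts
        # (self.result here) are missing, so guard behind the catch artifact.
        if self.compute_failed:
            print("start failed:", self.compute_failed)
        else:
            print("result:", self.result)

if __name__ == "__main__":
    RobustFlow()
```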
@@ -768,53 +788,41 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...
 
  @typing.overload
- def
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
 
 
  Parameters
  ----------
-
-
-
-
- Determines whether or not the exception is printed to
- stdout when caught.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...
 
  @typing.overload
- def
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
  """
- Specifies
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
 
 
  Parameters
  ----------
-
-
-
-
- Determines whether or not the exception is printed to
- stdout when caught.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...
 
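The hunk above rewrites the `@secrets` overloads. A minimal sketch of the documented `sources` parameter, assuming a secrets backend is configured for the deployment; the spec name `db-credentials` and the `DB_USER` variable are placeholders, not values from the stubs:

```python
import os

from metaflow import FlowSpec, secrets, step


class SecretsDemoFlow(FlowSpec):

    # "db-credentials" is a placeholder secret spec; what a spec looks
    # like depends on the secrets backend configured for the deployment.
    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        # Keys of the resolved secret arrive as environment variables
        # before the step body executes.
        print("DB user set:", "DB_USER" in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsDemoFlow()
```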
@@ -861,52 +869,40 @@ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy:
  """
  ...
 
-
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
-
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Specifies that this step should execute on DGX cloud.
 
 
  Parameters
  ----------
-
-
-
-
-
-
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ queue_timeout : int
+ Time to keep the job in NVCF's queue.
  """
  ...
 
  @typing.overload
- def
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+ It exists to make it easier for users to know that this decorator should only be used with
+ a Neo Cloud like Nebius.
+ """
  ...
 
  @typing.overload
- def
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
-
-
-
-
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+ It exists to make it easier for users to know that this decorator should only be used with
+ a Neo Cloud like Nebius.
  """
  ...
 
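The hunk above swaps in the `@nvidia` stub and relocates `@nebius_s3_proxy`. A sketch of the documented `gpu`, `gpu_type`, and `queue_timeout` parameters; the values shown are assumptions, since valid GPU types and queue timeouts depend on the NVCF deployment you target:

```python
from metaflow import FlowSpec, nvidia, step


class GpuDemoFlow(FlowSpec):

    # gpu_type="H100" and queue_timeout=3600 are illustrative; valid
    # values depend on the NVCF queues available to your account.
    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)
    @step
    def start(self):
        import subprocess

        # The step body runs on the DGX cloud worker, so the GPU
        # requested above should be visible to nvidia-smi there.
        print(subprocess.run(["nvidia-smi", "-L"], capture_output=True).stdout)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    GpuDemoFlow()
```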
@@ -951,57 +947,145 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...
 
  @typing.overload
- def
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
-
- to a step needs to be retried.
+ Enables loading / saving of models within a step.
 
-
-
-
+ > Examples
+ - Saving Models
+ ```python
+ @model
+ @step
+ def train(self):
+ # current.model.save returns a dictionary reference to the model saved
+ self.my_model = current.model.save(
+ path_to_my_model,
+ label="my_model",
+ metadata={
+ "epochs": 10,
+ "batch-size": 32,
+ "learning-rate": 0.001,
+ }
+ )
+ self.next(self.test)
 
-
-
-
+ @model(load="my_model")
+ @step
+ def test(self):
+ # `current.model.loaded` returns a dictionary of the loaded models
+ # where the key is the name of the artifact and the value is the path to the model
+ print(os.listdir(current.model.loaded["my_model"]))
+ self.next(self.end)
+ ```
+
+ - Loading models
+ ```python
+ @step
+ def train(self):
+ # current.model.load returns the path to the model loaded
+ checkpoint_path = current.model.load(
+ self.checkpoint_key,
+ )
+ model_path = current.model.load(
+ self.model,
+ )
+ self.next(self.test)
+ ```
 
 
  Parameters
  ----------
-
-
-
-
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...
 
  @typing.overload
- def
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
  """
-
- to a step needs to be retried.
+ Enables loading / saving of models within a step.
 
-
-
-
+ > Examples
+ - Saving Models
+ ```python
+ @model
+ @step
+ def train(self):
+ # current.model.save returns a dictionary reference to the model saved
+ self.my_model = current.model.save(
+ path_to_my_model,
+ label="my_model",
+ metadata={
+ "epochs": 10,
+ "batch-size": 32,
+ "learning-rate": 0.001,
+ }
+ )
+ self.next(self.test)
 
-
-
-
+ @model(load="my_model")
+ @step
+ def test(self):
+ # `current.model.loaded` returns a dictionary of the loaded models
+ # where the key is the name of the artifact and the value is the path to the model
+ print(os.listdir(current.model.loaded["my_model"]))
+ self.next(self.end)
+ ```
+
+ - Loading models
+ ```python
+ @step
+ def train(self):
+ # current.model.load returns the path to the model loaded
+ checkpoint_path = current.model.load(
+ self.checkpoint_key,
+ )
+ model_path = current.model.load(
+ self.model,
+ )
+ self.next(self.test)
+ ```
 
 
  Parameters
  ----------
-
-
-
-
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
+ """
+ ...
+
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on DGX cloud.
+
+
+ Parameters
+ ----------
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
  """
  ...
 
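The hunk above restores the `@model` docstring, whose examples cover the string form of `load`. As a complement, a sketch of the tuple form and of `temp_dir_root`, which the parameter docs describe but the examples do not show; the file name, paths, and step layout are illustrative assumptions:

```python
from metaflow import FlowSpec, current, model, step


class ModelDemoFlow(FlowSpec):

    @model
    @step
    def start(self):
        # "model.bin" is an illustrative local file standing in for
        # real trained weights.
        with open("model.bin", "wb") as f:
            f.write(b"weights")
        self.my_model = current.model.save("model.bin", label="my_model")
        self.next(self.score)

    # Tuple form of `load`: unpack the "my_model" artifact at ./weights;
    # per the docstring, a None path would mean the working directory.
    @model(load=[("my_model", "./weights")], temp_dir_root="/tmp/models")
    @step
    def score(self):
        import os

        print(os.listdir(current.model.loaded["my_model"]))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ModelDemoFlow()
```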
@@ -1065,207 +1149,123 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...
 
  @typing.overload
- def
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
-
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ Internal decorator to support Fast bakery
  """
  ...
 
  @typing.overload
- def
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
-
-
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
  ...
 
-
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies
-
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
 
 
  Parameters
  ----------
-
-
-
-
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
 
  @typing.overload
- def
- """
- A simple decorator that demonstrates using CardDecoratorInjector
- to inject a card and render simple markdown content.
- """
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
-
- to
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
 
  @typing.overload
- def
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
-
-
- > Examples
-
- - Saving Checkpoints
-
- ```python
- @checkpoint
- @step
- def train(self):
- model = create_model(self.parameters, checkpoint_path = None)
- for i in range(self.epochs):
- # some training logic
- loss = model.train(self.dataset)
- if i % 10 == 0:
- model.save(
- current.checkpoint.directory,
- )
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
- # and returns a reference dictionary to the checkpoint saved in the datastore
- self.latest_checkpoint = current.checkpoint.save(
- name="epoch_checkpoint",
- metadata={
- "epoch": i,
- "loss": loss,
- }
- )
- ```
-
- - Using Loaded Checkpoints
-
- ```python
- @retry(times=3)
- @checkpoint
- @step
- def train(self):
- # Assume that the task has restarted and the previous attempt of the task
- # saved a checkpoint
- checkpoint_path = None
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
- print("Loaded checkpoint from the previous attempt")
- checkpoint_path = current.checkpoint.directory
+ Creates a human-readable report, a Metaflow Card, after this step completes.
 
-
- for i in range(self.epochs):
- ...
- ```
+ Note that you may add multiple `@card` decorators in a step with different parameters.
 
 
  Parameters
  ----------
-
-
-
-
-
-
-
-
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...
 
  @typing.overload
- def
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
-
-
- > Examples
-
- - Saving Checkpoints
+ Creates a human-readable report, a Metaflow Card, after this step completes.
 
-
- @checkpoint
- @step
- def train(self):
- model = create_model(self.parameters, checkpoint_path = None)
- for i in range(self.epochs):
- # some training logic
- loss = model.train(self.dataset)
- if i % 10 == 0:
- model.save(
- current.checkpoint.directory,
- )
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
- # and returns a reference dictionary to the checkpoint saved in the datastore
- self.latest_checkpoint = current.checkpoint.save(
- name="epoch_checkpoint",
- metadata={
- "epoch": i,
- "loss": loss,
- }
- )
- ```
-
- - Using Loaded Checkpoints
-
- ```python
- @retry(times=3)
- @checkpoint
- @step
- def train(self):
- # Assume that the task has restarted and the previous attempt of the task
- # saved a checkpoint
- checkpoint_path = None
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
- print("Loaded checkpoint from the previous attempt")
- checkpoint_path = current.checkpoint.directory
-
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
- for i in range(self.epochs):
- ...
- ```
+ Note that you may add multiple `@card` decorators in a step with different parameters.
 
 
  Parameters
  ----------
-
-
-
-
-
-
-
-
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...
 
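The hunk above restores `@retry` and `@card` in their new positions. A sketch stacking the two, following the `@retry` docstring's advice on transient errors; the retry settings and card content are illustrative, and pairing the decorators this way is a choice of this example, not mandated by the stubs:

```python
from metaflow import FlowSpec, card, current, retry, step
from metaflow.cards import Markdown


class RobustReportFlow(FlowSpec):

    # Retry transient failures twice, one minute apart, then publish a
    # card summarizing the successful attempt.
    @card(type="default", timeout=60)
    @retry(times=2, minutes_between_retries=1)
    @step
    def start(self):
        current.card.append(Markdown("# Run report\nStep completed."))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    RobustReportFlow()
```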
@@ -1348,104 +1348,89 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  """
  ...
 
-
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
-
-
-
-
-
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
 
 
  Parameters
  ----------
-
-
-
-
-
-
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+ wildcard_match : bool
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ a reference to the s3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
  """
  ...
 
-
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
-
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
 
 
  Parameters
  ----------
-
-
-
-
-
-
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states, (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ time difference with the previous execution to look at,
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
  """
  ...
 
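The hunk above relocates the Airflow sensor stubs. A sketch of `@airflow_s3_key_sensor` with every keyword spelled out, since the stub signature declares no defaults; the bucket, key, and sensor names are placeholders, the `None` values lean on the docstring's stated defaults, and the decorator only takes effect when the flow is compiled via `airflow create`:

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


# Placeholder sensor configuration; Airflow materializes it as an
# S3KeySensor in front of the start step when the DAG is generated.
@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name="wait_for_input_drop",
    description="Block start until the daily input lands in S3",
    bucket_key="s3://my-bucket/input/latest.csv",
    bucket_name=None,
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorGatedFlow()
```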
@@ -1542,53 +1527,180 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...
 
-
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
-
- `@checkpoint`/`@model`/`@huggingface_hub` decorators.
+ Specifies the Conda environment for all steps of the flow.
 
-
-
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
 
- 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
- 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
- - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
- 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
- - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
 
-
+ Parameters
  ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
 
-
-
- ```python
- @with_artifact_store(
- type="s3",
- config=lambda: {
- "root": "s3://my-bucket-foo/path/to/root",
- "role_arn": ROLE,
- },
- )
- class MyFlow(FlowSpec):
-
- @checkpoint
- @step
- def start(self):
- with open("my_file.txt", "w") as f:
- f.write("Hello, World!")
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
- self.next(self.end)
-
- ```
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
 
- - Using credentials to access the s3-compatible datastore.
 
-
-
-
-
-
-
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
+ """
+ Allows setting external datastores to save data for the
+ `@checkpoint`/`@model`/`@huggingface_hub` decorators.
+
+ This decorator is useful when users wish to save data to a different datastore
+ than what is configured in Metaflow. This can be for variety of reasons:
+
+ 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
+ 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
+ - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
+ 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
+ - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
+
+ Usage:
+ ----------
+
+ - Using a custom IAM role to access the datastore.
+
+ ```python
+ @with_artifact_store(
+ type="s3",
+ config=lambda: {
+ "root": "s3://my-bucket-foo/path/to/root",
+ "role_arn": ROLE,
+ },
+ )
+ class MyFlow(FlowSpec):
+
+ @checkpoint
+ @step
+ def start(self):
+ with open("my_file.txt", "w") as f:
+ f.write("Hello, World!")
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
+ self.next(self.end)
+
+ ```
+
+ - Using credentials to access the s3-compatible datastore.
+
+ ```python
+ @with_artifact_store(
+ type="s3",
+ config=lambda: {
+ "root": "s3://my-bucket-foo/path/to/root",
+ "client_params": {
  "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
  "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
  },
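The hunk above relocates the flow-level environment decorators (`@conda_base`, `@pypi_base`, `@project`, `@with_artifact_store`). A sketch stacking `@project` and `@conda_base` as the docstrings describe; the project name and package pins are assumptions:

```python
from metaflow import FlowSpec, conda_base, project, step


# "demo_project" and the pandas pin are illustrative; per the docstring,
# project names may contain only lowercase alphanumerics and underscores.
@project(name="demo_project")
@conda_base(packages={"pandas": "2.2.2"}, python="3.11.0")
class ProjectScopedFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd

        # Every step of this flow runs in the shared Conda environment
        # declared above.
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectScopedFlow()
```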
@@ -1657,43 +1769,103 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
  ...
 
  @typing.overload
- def
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
-
-
-
-
-
-
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
  @typing.overload
- def
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
-
-
-
-
-
-
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
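The hunk above restores the full `@trigger_on_finish` docstring. A sketch of the single-flow form; `FooFlow` mirrors the upstream name used in the docstring examples, and with `@project` in play the fully qualified form also works:

```python
from metaflow import FlowSpec, step, trigger_on_finish


# Deploy this flow to a production scheduler; it then runs whenever an
# upstream "FooFlow" run in the same namespace completes successfully.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```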
@@ -1748,177 +1920,5 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
|
|
|
1748
1920
|
"""
|
|
1749
1921
|
...
|
|
1750
1922
|
|
|
1751
|
-
@typing.overload
|
|
1752
|
-
def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
|
1753
|
-
"""
|
|
1754
|
-
Specifies the Conda environment for all steps of the flow.
|
|
1755
|
-
|
|
1756
|
-
Use `@conda_base` to set common libraries required by all
|
|
1757
|
-
steps and use `@conda` to specify step-specific additions.
|
|
1758
|
-
|
|
1759
|
-
|
|
1760
|
-
Parameters
|
|
1761
|
-
----------
|
|
1762
|
-
packages : Dict[str, str], default {}
|
|
1763
|
-
Packages to use for this flow. The key is the name of the package
|
|
1764
|
-
and the value is the version to use.
|
|
1765
|
-
libraries : Dict[str, str], default {}
|
|
1766
|
-
Supported for backward compatibility. When used with packages, packages will take precedence.
|
|
1767
|
-
python : str, optional, default None
|
|
1768
|
-
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
|
1769
|
-
that the version used will correspond to the version of the Python interpreter used to start the run.
|
|
1770
|
-
disabled : bool, default False
|
|
1771
|
-
If set to True, disables Conda.
|
|
1772
|
-
"""
|
|
1773
|
-
...
|
|
1774
|
-
|
|
1775
|
-
@typing.overload
|
|
1776
|
-
def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
|
|
1777
|
-
...
|
|
1778
|
-
|
|
1779
|
-
def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
|
|
1780
|
-
"""
|
|
1781
|
-
Specifies the Conda environment for all steps of the flow.
|
|
1782
|
-
|
|
1783
|
-
Use `@conda_base` to set common libraries required by all
|
|
1784
|
-
steps and use `@conda` to specify step-specific additions.
|
|
1785
|
-
|
|
1786
|
-
|
|
1787
|
-
Parameters
|
|
1788
|
-
----------
|
|
1789
|
-
packages : Dict[str, str], default {}
|
|
1790
|
-
Packages to use for this flow. The key is the name of the package
|
|
1791
|
-
and the value is the version to use.
|
|
1792
|
-
libraries : Dict[str, str], default {}
|
|
1793
|
-
Supported for backward compatibility. When used with packages, packages will take precedence.
|
|
1794
|
-
python : str, optional, default None
|
|
1795
|
-
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
|
1796
|
-
that the version used will correspond to the version of the Python interpreter used to start the run.
|
|
1797
|
-
disabled : bool, default False
|
|
1798
|
-
If set to True, disables Conda.
|
|
1799
|
-
"""
|
|
1800
|
-
...
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the `start` step of the flow.
-     This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators; adding more than one ensures that the `start` step starts only after all sensors finish.
-
-
-     Parameters
-     ----------
-     timeout : int
-         Time, in seconds, before the task times out and fails. (Default: 3600)
-     poke_interval : int
-         Time, in seconds, that the job should wait between tries. (Default: 60)
-     mode : str
-         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-     exponential_backoff : bool
-         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
-     pool : str
-         The slot pool this task should run in;
-         slot pools are a way to limit concurrency for certain tasks. (Default: None)
-     soft_fail : bool
-         Set to True to mark the task as SKIPPED on failure. (Default: False)
-     name : str
-         Name of the sensor on Airflow.
-     description : str
-         Description of the sensor in the Airflow UI.
-     external_dag_id : str
-         The dag_id that contains the task you want to wait for.
-     external_task_ids : List[str]
-         The list of task_ids that you want to wait for.
-         If None (the default), the sensor waits for the DAG. (Default: None)
-     allowed_states : List[str]
-         Iterable of allowed states. (Default: ['success'])
-     failed_states : List[str]
-         Iterable of failed or disallowed states. (Default: None)
-     execution_delta : datetime.timedelta
-         Time difference with the previous execution to look at;
-         the default is the same logical date as the current task or DAG. (Default: None)
-     check_existence : bool
-         Set to True to check whether the external task exists or whether
-         the DAG to wait for exists. (Default: True)
-     """
-     ...
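As a concrete sketch of the decorator above, gating a flow on a hypothetical upstream Airflow task (the DAG and task IDs are placeholders; omitted parameters are assumed to fall back to the defaults listed in the docstring):

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step

# Attach a sensor that waits for task "load_warehouse" in the upstream
# DAG "nightly_etl" (both names hypothetical) before `start` runs.
@airflow_external_task_sensor(
    external_dag_id="nightly_etl",
    external_task_ids=["load_warehouse"],
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # Reached only after the sensor observes the upstream task succeed.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```

The sensor only takes effect once the flow is compiled for Airflow, e.g. with `python downstream_flow.py airflow create downstream_dag.py`.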
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-     before the `start` step of the flow. This decorator only works when a flow is scheduled on Airflow
-     and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-     added as flow decorators; adding more than one ensures that the `start` step
-     starts only after all sensors finish.
-
-
-     Parameters
-     ----------
-     timeout : int
-         Time, in seconds, before the task times out and fails. (Default: 3600)
-     poke_interval : int
-         Time, in seconds, that the job should wait between tries. (Default: 60)
-     mode : str
-         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-     exponential_backoff : bool
-         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
-     pool : str
-         The slot pool this task should run in;
-         slot pools are a way to limit concurrency for certain tasks. (Default: None)
-     soft_fail : bool
-         Set to True to mark the task as SKIPPED on failure. (Default: False)
-     name : str
-         Name of the sensor on Airflow.
-     description : str
-         Description of the sensor in the Airflow UI.
-     bucket_key : Union[str, List[str]]
-         The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
-         When specified as a full s3:// URL, leave `bucket_name` as None.
-     bucket_name : str
-         Name of the S3 bucket. Only needed when `bucket_key` is not provided as a full s3:// URL.
-         When specified, all keys passed to `bucket_key` refer to this bucket. (Default: None)
-     wildcard_match : bool
-         Whether `bucket_key` should be interpreted as a Unix wildcard pattern. (Default: False)
-     aws_conn_id : str
-         A reference to the S3 connection on Airflow. (Default: None)
-     verify : bool
-         Whether or not to verify SSL certificates for the S3 connection. (Default: None)
-     """
-     ...
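Similarly, a minimal sketch of `@airflow_s3_key_sensor` waiting on a hypothetical object (the bucket and key are placeholders; since a full s3:// URL is used, `bucket_name` is left unset as the docstring advises, and other parameters are assumed to take their documented defaults):

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Block `start` until the (hypothetical) export file lands in S3.
@airflow_s3_key_sensor(
    bucket_key="s3://example-bucket/exports/daily/data.parquet",
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3GatedFlow()
```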
-
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     Specifies what flows belong to the same project.
-
-     A project-specific namespace is created for all flows that
-     use the same `@project(name)`.
-
-
-     Parameters
-     ----------
-     name : str
-         Project name. Make sure that the name is unique amongst all
-         projects that use the same production scheduler. The name may
-         contain only lowercase alphanumeric characters and underscores.
-
-     branch : Optional[str], default None
-         The branch to use. If not specified, the branch is set to
-         `user.<username>` unless `production` is set to `True`. This can
-         also be set on the command line using `--branch` as a top-level option.
-         It is an error to specify `branch` both in the decorator and on the command line.
-
-     production : bool, default False
-         Whether or not the branch is the production branch. This can also be set on the
-         command line using `--production` as a top-level option. It is an error to specify
-         `production` both in the decorator and on the command line.
-         The project branch name will be:
-           - if `branch` is specified:
-             - if `production` is True: `prod.<branch>`
-             - if `production` is False: `test.<branch>`
-           - if `branch` is not specified:
-             - if `production` is True: `prod`
-             - if `production` is False: `user.<username>`
-     """
-     ...
-

  pkg_name: str
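To make the branch-naming rules above concrete, a minimal sketch (the project name is illustrative):

```python
from metaflow import FlowSpec, project, step

# Every flow decorated with @project(name="fraud_model") shares one
# namespace, isolated per branch: user.<username> by default,
# test.<branch> with --branch, prod with --production, and
# prod.<branch> when both are given.
@project(name="fraud_model")
class TrainingFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TrainingFlow()
```

For example, `python training_flow.py --branch exp1 run` would execute under the `test.exp1` branch, while `--production` alone selects `prod`.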