ob-metaflow-stubs 3.6__py2.py3-none-any.whl → 3.8__py2.py3-none-any.whl
This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +586 -387
- metaflow-stubs/cards.pyi +5 -5
- metaflow-stubs/cli.pyi +45 -20
- metaflow-stubs/client/__init__.pyi +4 -4
- metaflow-stubs/client/core.pyi +6 -6
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/clone_util.pyi +2 -2
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +3 -3
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/metadata/metadata.pyi +2 -2
- metaflow-stubs/metadata/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +4 -2
- metaflow-stubs/metaflow_current.pyi +6 -6
- metaflow-stubs/mflog/mflog.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_cli.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +5 -5
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +5 -5
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/logs_cli.pyi +3 -3
- metaflow-stubs/plugins/package_cli.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/perimeters.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/tag_cli.pyi +4 -4
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/procpoll.pyi +2 -2
- metaflow-stubs/profilers/__init__.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +9 -0
- metaflow-stubs/runner/metaflow_runner.pyi +696 -0
- metaflow-stubs/runner/nbrun.pyi +224 -0
- metaflow-stubs/runner/subprocess_manager.pyi +221 -0
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- {ob_metaflow_stubs-3.6.dist-info → ob_metaflow_stubs-3.8.dist-info}/METADATA +1 -1
- ob_metaflow_stubs-3.8.dist-info/RECORD +142 -0
- ob_metaflow_stubs-3.6.dist-info/RECORD +0 -138
- {ob_metaflow_stubs-3.6.dist-info → ob_metaflow_stubs-3.8.dist-info}/WHEEL +0 -0
- {ob_metaflow_stubs-3.6.dist-info → ob_metaflow_stubs-3.8.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,23 +1,24 @@
The generated header records the new Metaflow version and build timestamp, and the TYPE_CHECKING imports are updated for the new runner module (removed lines are shown truncated, as in the original diff view):

 ##################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.
-# Generated on 2024-05-
+# MF version: 2.12.0.1+ob(v1) #
+# Generated on 2024-05-30T17:28:59.704529 #
 ##################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import metaflow.
+    import metaflow._vendor.click.types
     import metaflow.parameters
-    import
-    import metaflow.events
+    import io
     import metaflow.datastore.inputs
-    import
-    import metaflow.
+    import metaflow.runner.metaflow_runner
+    import metaflow.events
     import metaflow.client.core
-    import
+    import datetime
+    import typing
     import metaflow.plugins.datatools.s3.s3
+    import metaflow.metaflow_current
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
@@ -725,55 +726,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
The `@pypi` step decorator is removed from this position: both `@typing.overload` signatures, the implementation signature `def pypi(f=None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None)`, and the docstring "Specifies the PyPI packages for the step." It reappears unchanged later in the file (see the `@@ -1331,60 +1199,211 @@` hunk below). The `@batch` overload that follows is unchanged here.
@@ -832,15 +784,15 @@ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optio
Two lines of the `@batch` docstring are tightened:

     Alias for inferentia. Use only one of the two.
 efa : int, default 0
     Number of elastic fabric adapter network devices to attach to container
-ephemeral_storage: int, default None
-    The total amount, in GiB, of ephemeral storage to set for the task
+ephemeral_storage : int, default None
+    The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
     This is only relevant for Fargate compute environments
 log_driver: str, optional, default None
     The log driver to use for the Amazon ECS container.
 log_options: List[str], optional, default None
     List of strings containing options for the chosen log driver. The configurable values
     depend on the `log driver` chosen. Validation of these options is not supported yet.
-    Example
+    Example: [`awslogs-group:aws/batch/job`]
@@ -909,15 +861,15 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
The same two docstring fixes are applied to the second `@batch` overload: `ephemeral_storage : int, default None` now documents the 21-200GiB range, and the bare `Example` line becomes the concrete `Example: [`awslogs-group:aws/batch/job`]`.
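The tightened docstring maps directly onto usage. A minimal sketch, assuming an AWS Batch queue on Fargate is configured; the flow name, resource sizes, and log group are illustrative placeholders:

```
from metaflow import FlowSpec, batch, step


class FargateBatchFlow(FlowSpec):

    # ephemeral_storage is in GiB (21-200) and only applies to Fargate;
    # log_options entries use the "key:value" form shown in the docstring example.
    @batch(cpu=2, memory=8192, ephemeral_storage=50,
           log_driver="awslogs", log_options=["awslogs-group:aws/batch/job"])
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    FargateBatchFlow()
```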
@@ -975,125 +927,59 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
This hunk reorders the step decorators that follow `@retry`. Removed from this position: the `@environment` overloads, the full `@kubernetes` decorator, and the opening `@secrets` overload; all of them reappear unchanged later in the file. Added in their place are the `@timeout` overloads (`def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0)` plus the usual overload/implementation pair), moved up from the `@@ -1331,60 @@` region with this docstring:

    Specifies a timeout for your step.

    This decorator is useful if this step may hang indefinitely.

    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
    A timeout is considered to be an exception thrown by the step. It will cause the step to be
    retried if needed and the exception will be caught by the `@catch` decorator, if present.

    Note that all the values specified in parameters are added together so if you specify
    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

    Parameters
    ----------
    seconds : int, default 0
        Number of seconds to wait prior to timing out.
    minutes : int, default 0
        Number of minutes to wait prior to timing out.
    hours : int, default 0
        Number of hours to wait prior to timing out.
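A minimal sketch of the additive timeout semantics described above, combined with `@retry` and `@catch` as the docstring suggests; the flow name and timings are illustrative only:

```
from metaflow import FlowSpec, catch, retry, step, timeout


class TimeoutFlow(FlowSpec):

    # Effective timeout is 1 hour + 5 minutes (values are added together).
    # On timeout the step is retried once; a final failure is caught into self.failure.
    @catch(var="failure")
    @retry(times=1)
    @timeout(minutes=5, hours=1)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TimeoutFlow()
```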
@@ -1232,53 +1118,35 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
Another reordering: the `@card` overloads and docstring ("Note that you may add multiple `@card` decorators in a step with different parameters.", with the id/options/timeout parameters) are removed from the position after `@resources` and reappear unchanged later in the file. Added in their place are the `@secrets` overloads (`def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [])` plus the overload/implementation pair), moved up with this docstring:

    Specifies secrets to be retrieved and injected as environment variables prior to
    the execution of a step.

    Parameters
    ----------
    sources : List[Union[str, Dict[str, Any]]], default: []
        List of secret specs, defining how the secrets are to be retrieved
@@ -1331,60 +1199,211 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
This is the other half of the reordering. The `@timeout` overloads are removed from their old position after `@catch` (they now follow `@retry`, see above), and the blocks removed from their old positions elsewhere in this diff are re-added here unchanged, in this order:

- `def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None)`: "Specifies that this step should execute on Kubernetes.", with the full cpu/memory/disk/image/image_pull_policy/service_account/secrets/namespace/gpu/gpu_vendor/tolerations/tmpfs/persistent_volume_claims/shared_memory/port parameter documentation.
- The `@card` overloads (`def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45)`): "Creates a human-readable report, a Metaflow Card, after this step completes. Note that you may add multiple `@card` decorators in a step with different parameters."
- The `@pypi` overloads (`def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None)`): "Specifies the PyPI packages for the step", augmenting any attributes set in the `@pypi_base` flow-level decorator.
- The `@environment` overloads (`def environment(*, vars: typing.Dict[str, str] = {})`): "Specifies environment variables to be set prior to the execution of a step."
- The `@project` flow decorator (`def project(*, name: str)`): "Specifies what flows belong to the same project." A project-specific namespace is created for all flows that use the same `@project(name)`; the name must be unique among projects on the same production scheduler and may contain only lowercase alphanumeric characters and underscores.
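A minimal sketch combining the step and flow decorators listed above; the project name, package pins, Python version, and environment variable are illustrative placeholders, not values taken from this diff:

```
from metaflow import FlowSpec, card, environment, project, pypi, step


@project(name="stub_demo")
class ReportFlow(FlowSpec):

    @environment(vars={"REPORT_MODE": "full"})
    @pypi(packages={"pandas": "2.2.2"}, python="3.11.5")
    @card(type="default", timeout=60)
    @step
    def start(self):
        import pandas as pd  # resolved by @pypi inside the step's environment
        self.table = pd.DataFrame({"x": [1, 2, 3]})
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ReportFlow()
```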
@@ -1480,109 +1499,95 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
Removed from the position after `airflow_s3_key_sensor`: the `@conda` step-decorator overloads ("Use `@conda_base` to set common libraries required by all steps and use `@conda` to specify step-specific additions.", with the python/disabled parameters), the `@airflow_external_task_sensor` flow decorator with its full docstring, and the `@project` declaration. The sensor decorator reappears later in the file and `@project` moves to an earlier position (see the `@@ -1331,60 @@` hunk above). Added in their place are the `@trigger` flow-decorator overloads (`def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {})` plus the overload/implementation pair), moved here with this docstring:

    Specifies the event(s) that this flow depends on.

    ```
    @trigger(event='foo')
    ```
    or
    ```
    @trigger(events=['foo', 'bar'])
    ```

    Additionally, you can specify the parameter mappings
    to map event payload to Metaflow parameters for the flow.
    ```
    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
    ```
    or
    ```
    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
    ```

    'parameters' can also be a list of strings and tuples like so:
    ```
    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
    ```
    This is equivalent to:
    ```
    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
    ```

    Parameters
    ----------
    event : Union[str, Dict[str, Any]], optional, default None
        Event dependency for this flow.
    events : List[Union[str, Dict[str, Any]]], default []
        Events dependency for this flow.
    options : Dict[str, Any], default {}
        Backend-specific configuration for tuning eventing behavior.
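A minimal sketch of the event-to-parameter mapping described in the docstring above; the event name, payload field, and default path are hypothetical:

```
from metaflow import FlowSpec, Parameter, step, trigger


# When an event named "data_updated" fires, its "path" payload field is
# mapped onto the flow's "input_path" parameter.
@trigger(event={"name": "data_updated", "parameters": {"input_path": "path"}})
class TriggeredFlow(FlowSpec):

    input_path = Parameter("input_path", default="s3://example-bucket/latest")

    @step
    def start(self):
        print("triggered with", self.input_path)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TriggeredFlow()
```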
@@ -1731,97 +1736,93 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
|
|
1731
1736
|
...
|
1732
1737
|
|
1733
1738
|
@typing.overload
|
1734
|
-
def
|
1739
|
+
def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
1735
1740
|
"""
|
1736
|
-
Specifies the
|
1737
|
-
|
1738
|
-
```
|
1739
|
-
@trigger(event='foo')
|
1740
|
-
```
|
1741
|
-
or
|
1742
|
-
```
|
1743
|
-
@trigger(events=['foo', 'bar'])
|
1744
|
-
```
|
1745
|
-
|
1746
|
-
Additionally, you can specify the parameter mappings
|
1747
|
-
to map event payload to Metaflow parameters for the flow.
|
1748
|
-
```
|
1749
|
-
@trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
|
1750
|
-
```
|
1751
|
-
or
|
1752
|
-
```
|
1753
|
-
@trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
|
1754
|
-
{'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
|
1755
|
-
```
|
1741
|
+
Specifies the Conda environment for all steps of the flow.
|
1756
1742
|
|
1757
|
-
|
1758
|
-
|
1759
|
-
@trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
|
1760
|
-
```
|
1761
|
-
This is equivalent to:
|
1762
|
-
```
|
1763
|
-
@trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
|
1764
|
-
```
|
1743
|
+
Use `@conda_base` to set common libraries required by all
|
1744
|
+
steps and use `@conda` to specify step-specific additions.
|
1765
1745
|
|
1766
1746
|
Parameters
|
1767
1747
|
----------
|
1768
|
-
|
1769
|
-
|
1770
|
-
|
1771
|
-
|
1772
|
-
|
1773
|
-
|
1774
|
-
|
1775
|
-
|
1748
|
+
packages : Dict[str, str], default {}
|
1749
|
+
Packages to use for this flow. The key is the name of the package
|
1750
|
+
and the value is the version to use.
|
1751
|
+
libraries : Dict[str, str], default {}
|
1752
|
+
Supported for backward compatibility. When used with packages, packages will take precedence.
|
1753
|
+
python : str, optional, default None
|
1754
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
1755
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
1756
|
+
disabled : bool, default False
|
1757
|
+
If set to True, disables Conda.
|
1776
1758
|
"""
|
1777
1759
|
...
|
1778
1760
|
|
1779
1761
|
@typing.overload
|
1780
|
-
def
|
1762
|
+
def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
|
1781
1763
|
...
|
1782
1764
|
|
1783
|
-
def
|
1765
|
+
def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
|
1784
1766
|
"""
|
1785
|
-
Specifies the
|
1786
|
-
|
1787
|
-
```
|
1788
|
-
@trigger(event='foo')
|
1789
|
-
```
|
1790
|
-
or
|
1791
|
-
```
|
1792
|
-
@trigger(events=['foo', 'bar'])
|
1793
|
-
```
|
1794
|
-
|
1795
|
-
Additionally, you can specify the parameter mappings
|
1796
|
-
to map event payload to Metaflow parameters for the flow.
|
1797
|
-
```
|
1798
|
-
@trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
|
1799
|
-
```
|
1800
|
-
or
|
1801
|
-
```
|
1802
|
-
@trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
|
1803
|
-
{'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
|
1804
|
-
```
|
1767
|
+
Specifies the Conda environment for all steps of the flow.
|
1805
1768
|
|
1806
|
-
|
1807
|
-
|
1808
|
-
@trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
|
1809
|
-
```
|
1810
|
-
This is equivalent to:
|
1811
|
-
```
|
1812
|
-
@trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
|
1813
|
-
```
|
1769
|
+
Use `@conda_base` to set common libraries required by all
|
1770
|
+
steps and use `@conda` to specify step-specific additions.
|
1814
1771
|
|
1815
1772
|
Parameters
|
1816
1773
|
----------
|
1817
|
-
|
1818
|
-
|
1819
|
-
|
1820
|
-
|
1821
|
-
|
1822
|
-
|
1823
|
-
|
1774
|
+
packages : Dict[str, str], default {}
|
1775
|
+
Packages to use for this flow. The key is the name of the package
|
1776
|
+
and the value is the version to use.
|
1777
|
+
libraries : Dict[str, str], default {}
|
1778
|
+
Supported for backward compatibility. When used with packages, packages will take precedence.
|
1779
|
+
python : str, optional, default None
|
1780
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
1781
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
1782
|
+
disabled : bool, default False
|
1783
|
+
If set to True, disables Conda.
|
1784
|
+
"""
|
1785
|
+
...
|
```diff
+
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator ensures that the `start` step begins only after all sensors finish.
 
+    Parameters
+    ----------
+    timeout : int
+        Time, in seconds, before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time, in seconds, that the job should wait between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+    pool : str
+        The slot pool this task should run in;
+        slot pools are a way to limit concurrency for certain tasks. (Default: None)
+    soft_fail : bool
+        Set to True to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow.
+    description : str
+        Description of the sensor in the Airflow UI.
+    external_dag_id : str
+        The dag_id that contains the task you want to wait for.
+    external_task_ids : List[str]
+        The list of task_ids that you want to wait for.
+        If None (default value), the sensor waits for the DAG. (Default: None)
+    allowed_states : List[str]
+        Iterable of allowed states. (Default: ['success'])
+    failed_states : List[str]
+        Iterable of failed or disallowed states. (Default: None)
+    execution_delta : datetime.timedelta
+        Time difference with the previous execution to look at;
+        the default is the same logical date as the current task or DAG. (Default: None)
+    check_existence : bool
+        Set to True to check if the external task exists or check if
+        the DAG to wait for exists. (Default: True)
     """
     ...
 
```
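Applied to a flow, the sensor documented above gates the `start` step on an upstream Airflow task. Below is a minimal sketch; the upstream DAG id, task id, and parameter values are assumptions for illustration, and only a subset of the keyword arguments is passed, relying on the documented defaults for the rest.

```python
# Illustrative sketch only: wait for an upstream Airflow task before `start` runs.
# The DAG id, task id, and parameter values here are example assumptions.
from metaflow import FlowSpec, airflow_external_task_sensor, step


@airflow_external_task_sensor(
    name="wait_for_features",
    external_dag_id="nightly_etl",           # upstream DAG to watch
    external_task_ids=["publish_features"],  # specific task(s) to wait for
    allowed_states=["success"],
    timeout=3600,
    poke_interval=60,
    mode="reschedule",
)
class DownstreamTrainingFlow(FlowSpec):

    @step
    def start(self):
        # Reached only after the sensor succeeds, once the flow has been
        # compiled for Airflow with `python downstream_training_flow.py airflow create <dag_file.py>`.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamTrainingFlow()
```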
```diff
@@ -2775,6 +2776,204 @@ class DataArtifact(metaflow.client.core.MetaflowObject, metaclass=type):
         ...
     ...
 
+class Runner(object, metaclass=type):
+    def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, **kwargs):
+        ...
+    def __enter__(self) -> metaflow.runner.metaflow_runner.Runner:
+        ...
+    def __aenter__(self) -> metaflow.runner.metaflow_runner.Runner:
+        ...
+    def _Runner__get_executing_run(self, tfp_runner_attribute, command_obj):
+        ...
+    def run(self, **kwargs) -> metaflow.runner.metaflow_runner.ExecutingRun:
+        """
+        Blocking execution of the run. This method will wait until
+        the run has completed execution.
+
+        Parameters
+        ----------
+        **kwargs : Any
+            Additional arguments that you would pass to `python myflow.py` after
+            the `run` command, in particular, any parameters accepted by the flow.
+
+        Returns
+        -------
+        ExecutingRun
+            ExecutingRun containing the results of the run.
+        """
+        ...
+    def resume(self, **kwargs):
+        """
+        Blocking resume execution of the run.
+        This method will wait until the resumed run has completed execution.
+
+        Parameters
+        ----------
+        **kwargs : Any
+            Additional arguments that you would pass to `python ./myflow.py` after
+            the `resume` command.
+
+        Returns
+        -------
+        ExecutingRun
+            ExecutingRun containing the results of the resumed run.
+        """
+        ...
+    def async_run(self, **kwargs) -> metaflow.runner.metaflow_runner.ExecutingRun:
+        """
+        Non-blocking execution of the run. This method will return as soon as the
+        run has launched.
+
+        Note that this method is asynchronous and needs to be `await`ed.
+
+        Parameters
+        ----------
+        **kwargs : Any
+            Additional arguments that you would pass to `python myflow.py` after
+            the `run` command, in particular, any parameters accepted by the flow.
+
+        Returns
+        -------
+        ExecutingRun
+            ExecutingRun representing the run that was started.
+        """
+        ...
+    def async_resume(self, **kwargs):
+        """
+        Non-blocking resume execution of the run.
+        This method will return as soon as the resume has launched.
+
+        Note that this method is asynchronous and needs to be `await`ed.
+
+        Parameters
+        ----------
+        **kwargs : Any
+            Additional arguments that you would pass to `python myflow.py` after
+            the `resume` command.
+
+        Returns
+        -------
+        ExecutingRun
+            ExecutingRun representing the resumed run that was started.
+        """
+        ...
+    def __exit__(self, exc_type, exc_value, traceback):
+        ...
+    def __aexit__(self, exc_type, exc_value, traceback):
+        ...
+    def cleanup(self):
+        """
+        Delete any temporary files created during execution.
+        """
+        ...
+    ...
+
```
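The `Runner` stubs above expose both a blocking and an asynchronous entry point. A compact sketch of both styles follows; `myflow.py`, its `alpha` parameter, and the `ExecutingRun` members used here (`run`, `status`, `wait()`) are assumptions based on the Runner API rather than something shown in this diff.

```python
# Illustrative sketch only: drive a flow file programmatically via Runner.
# "myflow.py" and its "alpha" parameter are example names; ExecutingRun's
# `run`, `status`, and `wait()` members are assumed from the Runner API.
import asyncio

from metaflow import Runner


def blocking_run() -> None:
    # Blocking: run() returns only after the flow has finished executing.
    with Runner("myflow.py", show_output=False) as runner:
        result = runner.run(alpha=0.5)
        print(result.status)  # e.g. "successful"
        print(result.run)     # the underlying metaflow.Run client object


async def non_blocking_run() -> None:
    # Non-blocking: async_run() returns as soon as the run has launched.
    async with Runner("myflow.py", show_output=False) as runner:
        executing = await runner.async_run(alpha=0.5)
        await executing.wait()  # wait for completion without blocking the event loop
        print(executing.status)


if __name__ == "__main__":
    blocking_run()
    asyncio.run(non_blocking_run())
```

The context-manager form is used here so that the runner's temporary files are cleaned up when the block exits, mirroring the `__exit__`/`__aexit__`/`cleanup` trio in the stubs above.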
```diff
+class NBRunner(object, metaclass=type):
+    def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", **kwargs):
+        ...
+    def nbrun(self, **kwargs):
+        """
+        Blocking execution of the run. This method will wait until
+        the run has completed execution.
+
+        Note that in contrast to `run`, this method returns a
+        `metaflow.Run` object directly and calls `cleanup()` internally
+        to support a common notebook pattern of executing a flow and
+        retrieving its results immediately.
+
+        Parameters
+        ----------
+        **kwargs : Any
+            Additional arguments that you would pass to `python myflow.py` after
+            the `run` command, in particular, any parameters accepted by the flow.
+
+        Returns
+        -------
+        Run
+            A `metaflow.Run` object representing the finished run.
+        """
+        ...
+    def nbresume(self, **kwargs):
+        """
+        Blocking resuming of a run. This method will wait until
+        the resumed run has completed execution.
+
+        Note that in contrast to `resume`, this method returns a
+        `metaflow.Run` object directly and calls `cleanup()` internally
+        to support a common notebook pattern of executing a flow and
+        retrieving its results immediately.
+
+        Parameters
+        ----------
+        **kwargs : Any
+            Additional arguments that you would pass to `python myflow.py` after
+            the `resume` command.
+
+        Returns
+        -------
+        Run
+            A `metaflow.Run` object representing the resumed run.
+        """
+        ...
+    def run(self, **kwargs):
+        """
+        Runs the flow.
+        """
+        ...
+    def resume(self, **kwargs):
+        """
+        Resumes the flow.
+        """
+        ...
+    def async_run(self, **kwargs):
+        """
+        Non-blocking execution of the run. This method will return as soon as the
+        run has launched. This method is equivalent to `Runner.async_run`.
+
+        Note that this method is asynchronous and needs to be `await`ed.
+
+        Parameters
+        ----------
+        **kwargs : Any
+            Additional arguments that you would pass to `python myflow.py` after
+            the `run` command, in particular, any parameters accepted by the flow.
+
+        Returns
+        -------
+        ExecutingRun
+            ExecutingRun representing the run that was started.
+        """
+        ...
+    def async_resume(self, **kwargs):
+        """
+        Non-blocking resume of the run. This method will return as soon as the
+        resume has launched. This method is equivalent to `Runner.async_resume`.
+
+        Note that this method is asynchronous and needs to be `await`ed.
+
+        Parameters
+        ----------
+        **kwargs : Any
+            Additional arguments that you would pass to `python myflow.py` after
+            the `resume` command.
+
+        Returns
+        -------
+        ExecutingRun
+            ExecutingRun representing the run that was started.
+        """
+        ...
+    def cleanup(self):
+        """
+        Delete any temporary files created during execution.
+
+        Call this method after using `async_run` or `async_resume`. You don't
+        have to call this after `nbrun` or `nbresume`.
+        """
+        ...
+    ...
+
```
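For notebooks, `NBRunner` wraps the same machinery around a flow class defined directly in a cell. The sketch below is meant to be pasted into a Jupyter/IPython cell; `TrainingFlow` and its `alpha` parameter are illustrative names, and the behaviour relied on (`nbrun` returning a finished `metaflow.Run` and cleaning up after itself) follows the docstring above.

```python
# Illustrative sketch only, intended for a notebook cell: NBRunner executes a
# flow class defined in the notebook itself. TrainingFlow and "alpha" are
# example names, not taken from this diff.
from metaflow import FlowSpec, NBRunner, Parameter, step


class TrainingFlow(FlowSpec):
    alpha = Parameter("alpha", default=0.1)

    @step
    def start(self):
        self.score = self.alpha * 10
        self.next(self.end)

    @step
    def end(self):
        pass


# Blocking notebook run: returns a metaflow.Run directly and removes its
# temporary files, so no explicit cleanup() call is needed afterwards.
run = NBRunner(TrainingFlow, show_output=False).nbrun(alpha=0.3)
print(run.data.score)  # read artifacts off the finished run
```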
```diff
 def get_aws_client(module, with_error = False, role_arn = None, session_vars = None, client_params = None):
     ...
 
```