metaflow-stubs 2.11.3__py2.py3-none-any.whl → 2.11.5__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +471 -459
- metaflow-stubs/cards.pyi +6 -6
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +4 -4
- metaflow-stubs/client/core.pyi +6 -6
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/clone_util.pyi +31 -0
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +5 -5
- metaflow-stubs/metadata/metadata.pyi +3 -3
- metaflow-stubs/metadata/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +4 -2
- metaflow-stubs/metaflow_current.pyi +6 -6
- metaflow-stubs/mflog/mflog.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
- metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +4 -4
- metaflow-stubs/plugins/aws/batch/batch_cli.pyi +4 -4
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +3 -3
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +3 -3
- metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
- metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/package_cli.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/tag_cli.pyi +4 -4
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
- metaflow-stubs/procpoll.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.11.3.dist-info → metaflow_stubs-2.11.5.dist-info}/METADATA +2 -2
- metaflow_stubs-2.11.5.dist-info/RECORD +132 -0
- {metaflow_stubs-2.11.3.dist-info → metaflow_stubs-2.11.5.dist-info}/WHEEL +1 -1
- metaflow_stubs-2.11.3.dist-info/RECORD +0 -131
- {metaflow_stubs-2.11.3.dist-info → metaflow_stubs-2.11.5.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,23 +1,23 @@
|
|
1
1
|
##################################################################################
|
2
2
|
# Auto-generated Metaflow stub file #
|
3
|
-
# MF version: 2.11.
|
4
|
-
# Generated on 2024-
|
3
|
+
# MF version: 2.11.5 #
|
4
|
+
# Generated on 2024-03-14T18:36:24.873925 #
|
5
5
|
##################################################################################
|
6
6
|
|
7
7
|
from __future__ import annotations
|
8
8
|
|
9
9
|
import typing
|
10
10
|
if typing.TYPE_CHECKING:
|
11
|
+
import metaflow.metaflow_current
|
12
|
+
import metaflow._vendor.click.types
|
11
13
|
import datetime
|
12
|
-
import metaflow.datastore.inputs
|
13
14
|
import metaflow.events
|
14
|
-
import
|
15
|
-
import metaflow.plugins.datatools.s3.s3
|
15
|
+
import metaflow.parameters
|
16
16
|
import metaflow.client.core
|
17
|
-
import metaflow.
|
17
|
+
import metaflow.plugins.datatools.s3.s3
|
18
|
+
import metaflow.datastore.inputs
|
18
19
|
import io
|
19
|
-
import
|
20
|
-
import metaflow._vendor.click.types
|
20
|
+
import typing
|
21
21
|
FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
|
22
22
|
StepFlag = typing.NewType("StepFlag", bool)
|
23
23
|
|
@@ -725,120 +725,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
|
|
725
725
|
"""
|
726
726
|
...
|
727
727
|
|
728
|
-
def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
729
|
-
"""
|
730
|
-
Specifies that this step should execute on Kubernetes.
|
731
|
-
|
732
|
-
Parameters
|
733
|
-
----------
|
734
|
-
cpu : int, default 1
|
735
|
-
Number of CPUs required for this step. If `@resources` is
|
736
|
-
also present, the maximum value from all decorators is used.
|
737
|
-
memory : int, default 4096
|
738
|
-
Memory size (in MB) required for this step. If
|
739
|
-
`@resources` is also present, the maximum value from all decorators is
|
740
|
-
used.
|
741
|
-
disk : int, default 10240
|
742
|
-
Disk size (in MB) required for this step. If
|
743
|
-
`@resources` is also present, the maximum value from all decorators is
|
744
|
-
used.
|
745
|
-
image : str, optional, default None
|
746
|
-
Docker image to use when launching on Kubernetes. If not specified, and
|
747
|
-
METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
|
748
|
-
not, a default Docker image mapping to the current version of Python is used.
|
749
|
-
image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
|
750
|
-
If given, the imagePullPolicy to be applied to the Docker image of the step.
|
751
|
-
service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
|
752
|
-
Kubernetes service account to use when launching pod in Kubernetes.
|
753
|
-
secrets : List[str], optional, default None
|
754
|
-
Kubernetes secrets to use when launching pod in Kubernetes. These
|
755
|
-
secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
|
756
|
-
in Metaflow configuration.
|
757
|
-
namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
|
758
|
-
Kubernetes namespace to use when launching pod in Kubernetes.
|
759
|
-
gpu : int, optional, default None
|
760
|
-
Number of GPUs required for this step. A value of zero implies that
|
761
|
-
the scheduled node should not have GPUs.
|
762
|
-
gpu_vendor : str, default KUBERNETES_GPU_VENDOR
|
763
|
-
The vendor of the GPUs to be used for this step.
|
764
|
-
tolerations : List[str], default []
|
765
|
-
The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
|
766
|
-
Kubernetes tolerations to use when launching pod in Kubernetes.
|
767
|
-
use_tmpfs : bool, default False
|
768
|
-
This enables an explicit tmpfs mount for this step.
|
769
|
-
tmpfs_tempdir : bool, default True
|
770
|
-
sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
|
771
|
-
tmpfs_size : int, optional, default: None
|
772
|
-
The value for the size (in MiB) of the tmpfs mount for this step.
|
773
|
-
This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
|
774
|
-
memory allocated for this step.
|
775
|
-
tmpfs_path : str, optional, default /metaflow_temp
|
776
|
-
Path to tmpfs mount for this step.
|
777
|
-
persistent_volume_claims : Dict[str, str], optional, default None
|
778
|
-
A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
|
779
|
-
volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
|
780
|
-
shared_memory: int, optional
|
781
|
-
Shared memory size (in MiB) required for this step
|
782
|
-
"""
|
783
|
-
...
|
784
|
-
|
785
|
-
@typing.overload
|
786
|
-
def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
787
|
-
"""
|
788
|
-
Specifies a timeout for your step.
|
789
|
-
|
790
|
-
This decorator is useful if this step may hang indefinitely.
|
791
|
-
|
792
|
-
This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
|
793
|
-
A timeout is considered to be an exception thrown by the step. It will cause the step to be
|
794
|
-
retried if needed and the exception will be caught by the `@catch` decorator, if present.
|
795
|
-
|
796
|
-
Note that all the values specified in parameters are added together so if you specify
|
797
|
-
60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
|
798
|
-
|
799
|
-
Parameters
|
800
|
-
----------
|
801
|
-
seconds : int, default 0
|
802
|
-
Number of seconds to wait prior to timing out.
|
803
|
-
minutes : int, default 0
|
804
|
-
Number of minutes to wait prior to timing out.
|
805
|
-
hours : int, default 0
|
806
|
-
Number of hours to wait prior to timing out.
|
807
|
-
"""
|
808
|
-
...
|
809
|
-
|
810
|
-
@typing.overload
|
811
|
-
def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
812
|
-
...
|
813
|
-
|
814
|
-
@typing.overload
|
815
|
-
def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
816
|
-
...
|
817
|
-
|
818
|
-
def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
|
819
|
-
"""
|
820
|
-
Specifies a timeout for your step.
|
821
|
-
|
822
|
-
This decorator is useful if this step may hang indefinitely.
|
823
|
-
|
824
|
-
This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
|
825
|
-
A timeout is considered to be an exception thrown by the step. It will cause the step to be
|
826
|
-
retried if needed and the exception will be caught by the `@catch` decorator, if present.
|
827
|
-
|
828
|
-
Note that all the values specified in parameters are added together so if you specify
|
829
|
-
60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
|
830
|
-
|
831
|
-
Parameters
|
832
|
-
----------
|
833
|
-
seconds : int, default 0
|
834
|
-
Number of seconds to wait prior to timing out.
|
835
|
-
minutes : int, default 0
|
836
|
-
Number of minutes to wait prior to timing out.
|
837
|
-
hours : int, default 0
|
838
|
-
Number of hours to wait prior to timing out.
|
839
|
-
"""
|
840
|
-
...
|
841
|
-
|
842
728
|
@typing.overload
|
843
729
|
def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
844
730
|
"""
|
@@ -917,188 +803,172 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
|
|
917
803
|
...
|
918
804
|
|
919
805
|
@typing.overload
|
920
|
-
def
|
806
|
+
def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
921
807
|
"""
|
922
|
-
Specifies
|
923
|
-
to a step needs to be retried.
|
924
|
-
|
925
|
-
This decorator is useful for handling transient errors, such as networking issues.
|
926
|
-
If your task contains operations that can't be retried safely, e.g. database updates,
|
927
|
-
it is advisable to annotate it with `@retry(times=0)`.
|
928
|
-
|
929
|
-
This can be used in conjunction with the `@catch` decorator. The `@catch`
|
930
|
-
decorator will execute a no-op task after all retries have been exhausted,
|
931
|
-
ensuring that the flow execution can continue.
|
808
|
+
Specifies environment variables to be set prior to the execution of a step.
|
932
809
|
|
933
810
|
Parameters
|
934
811
|
----------
|
935
|
-
|
936
|
-
|
937
|
-
minutes_between_retries : int, default 2
|
938
|
-
Number of minutes between retries.
|
812
|
+
vars : Dict[str, str], default {}
|
813
|
+
Dictionary of environment variables to set.
|
939
814
|
"""
|
940
815
|
...
|
941
816
|
|
942
817
|
@typing.overload
|
943
|
-
def
|
818
|
+
def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
944
819
|
...
|
945
820
|
|
946
821
|
@typing.overload
|
947
|
-
def
|
822
|
+
def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
948
823
|
...
|
949
824
|
|
950
|
-
def
|
825
|
+
def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
|
951
826
|
"""
|
952
|
-
Specifies
|
953
|
-
to a step needs to be retried.
|
954
|
-
|
955
|
-
This decorator is useful for handling transient errors, such as networking issues.
|
956
|
-
If your task contains operations that can't be retried safely, e.g. database updates,
|
957
|
-
it is advisable to annotate it with `@retry(times=0)`.
|
958
|
-
|
959
|
-
This can be used in conjunction with the `@catch` decorator. The `@catch`
|
960
|
-
decorator will execute a no-op task after all retries have been exhausted,
|
961
|
-
ensuring that the flow execution can continue.
|
827
|
+
Specifies environment variables to be set prior to the execution of a step.
|
962
828
|
|
963
829
|
Parameters
|
964
830
|
----------
|
965
|
-
|
966
|
-
|
967
|
-
minutes_between_retries : int, default 2
|
968
|
-
Number of minutes between retries.
|
831
|
+
vars : Dict[str, str], default {}
|
832
|
+
Dictionary of environment variables to set.
|
969
833
|
"""
|
970
834
|
...
|
971
835
|
|
972
836
|
@typing.overload
|
973
|
-
def
|
837
|
+
def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
974
838
|
"""
|
975
|
-
Specifies
|
976
|
-
|
977
|
-
The decorator will create an optional artifact, specified by `var`, which
|
978
|
-
contains the exception raised. You can use it to detect the presence
|
979
|
-
of errors, indicating that all happy-path artifacts produced by the step
|
980
|
-
are missing.
|
839
|
+
Specifies secrets to be retrieved and injected as environment variables prior to
|
840
|
+
the execution of a step.
|
981
841
|
|
982
842
|
Parameters
|
983
843
|
----------
|
984
|
-
|
985
|
-
|
986
|
-
If not specified, the exception is not stored.
|
987
|
-
print_exception : bool, default True
|
988
|
-
Determines whether or not the exception is printed to
|
989
|
-
stdout when caught.
|
844
|
+
sources : List[Union[str, Dict[str, Any]]], default: []
|
845
|
+
List of secret specs, defining how the secrets are to be retrieved
|
990
846
|
"""
|
991
847
|
...
|
992
848
|
|
993
849
|
@typing.overload
|
994
|
-
def
|
850
|
+
def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
995
851
|
...
|
996
852
|
|
997
853
|
@typing.overload
|
998
|
-
def
|
854
|
+
def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
999
855
|
...
|
1000
856
|
|
1001
|
-
def
|
857
|
+
def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
|
1002
858
|
"""
|
1003
|
-
Specifies
|
1004
|
-
|
1005
|
-
The decorator will create an optional artifact, specified by `var`, which
|
1006
|
-
contains the exception raised. You can use it to detect the presence
|
1007
|
-
of errors, indicating that all happy-path artifacts produced by the step
|
1008
|
-
are missing.
|
859
|
+
Specifies secrets to be retrieved and injected as environment variables prior to
|
860
|
+
the execution of a step.
|
1009
861
|
|
1010
862
|
Parameters
|
1011
863
|
----------
|
1012
|
-
|
1013
|
-
|
1014
|
-
If not specified, the exception is not stored.
|
1015
|
-
print_exception : bool, default True
|
1016
|
-
Determines whether or not the exception is printed to
|
1017
|
-
stdout when caught.
|
864
|
+
sources : List[Union[str, Dict[str, Any]]], default: []
|
865
|
+
List of secret specs, defining how the secrets are to be retrieved
|
1018
866
|
"""
|
1019
867
|
...
|
1020
868
|
|
1021
869
|
@typing.overload
|
1022
|
-
def
|
870
|
+
def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
1023
871
|
"""
|
1024
|
-
|
872
|
+
Specifies a timeout for your step.
|
1025
873
|
|
1026
|
-
|
874
|
+
This decorator is useful if this step may hang indefinitely.
|
1027
875
|
|
1028
|
-
|
1029
|
-
|
1030
|
-
|
1031
|
-
Card type.
|
1032
|
-
id : str, optional, default None
|
1033
|
-
If multiple cards are present, use this id to identify this card.
|
1034
|
-
options : Dict[str, Any], default {}
|
1035
|
-
Options passed to the card. The contents depend on the card type.
|
1036
|
-
timeout : int, default 45
|
1037
|
-
Interrupt reporting if it takes more than this many seconds.
|
876
|
+
This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
|
877
|
+
A timeout is considered to be an exception thrown by the step. It will cause the step to be
|
878
|
+
retried if needed and the exception will be caught by the `@catch` decorator, if present.
|
1038
879
|
|
880
|
+
Note that all the values specified in parameters are added together so if you specify
|
881
|
+
60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
|
1039
882
|
|
883
|
+
Parameters
|
884
|
+
----------
|
885
|
+
seconds : int, default 0
|
886
|
+
Number of seconds to wait prior to timing out.
|
887
|
+
minutes : int, default 0
|
888
|
+
Number of minutes to wait prior to timing out.
|
889
|
+
hours : int, default 0
|
890
|
+
Number of hours to wait prior to timing out.
|
1040
891
|
"""
|
1041
892
|
...
|
1042
893
|
|
1043
894
|
@typing.overload
|
1044
|
-
def
|
895
|
+
def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
1045
896
|
...
|
1046
897
|
|
1047
898
|
@typing.overload
|
1048
|
-
def
|
899
|
+
def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
1049
900
|
...
|
1050
901
|
|
1051
|
-
def
|
902
|
+
def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
|
1052
903
|
"""
|
1053
|
-
|
904
|
+
Specifies a timeout for your step.
|
1054
905
|
|
1055
|
-
|
906
|
+
This decorator is useful if this step may hang indefinitely.
|
907
|
+
|
908
|
+
This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
|
909
|
+
A timeout is considered to be an exception thrown by the step. It will cause the step to be
|
910
|
+
retried if needed and the exception will be caught by the `@catch` decorator, if present.
|
911
|
+
|
912
|
+
Note that all the values specified in parameters are added together so if you specify
|
913
|
+
60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
|
1056
914
|
|
1057
915
|
Parameters
|
1058
916
|
----------
|
1059
|
-
|
1060
|
-
|
1061
|
-
|
1062
|
-
|
1063
|
-
|
1064
|
-
|
1065
|
-
timeout : int, default 45
|
1066
|
-
Interrupt reporting if it takes more than this many seconds.
|
1067
|
-
|
1068
|
-
|
917
|
+
seconds : int, default 0
|
918
|
+
Number of seconds to wait prior to timing out.
|
919
|
+
minutes : int, default 0
|
920
|
+
Number of minutes to wait prior to timing out.
|
921
|
+
hours : int, default 0
|
922
|
+
Number of hours to wait prior to timing out.
|
1069
923
|
"""
|
1070
924
|
...
|
1071
925
|
|
1072
926
|
@typing.overload
|
1073
|
-
def
|
927
|
+
def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
1074
928
|
"""
|
1075
|
-
Specifies
|
1076
|
-
|
929
|
+
Specifies the PyPI packages for the step.
|
930
|
+
|
931
|
+
Information in this decorator will augment any
|
932
|
+
attributes set in the `@pypi_base` flow-level decorator. Hence,
|
933
|
+
you can use `@pypi_base` to set packages required by all
|
934
|
+
steps and use `@pypi` to specify step-specific overrides.
|
1077
935
|
|
1078
936
|
Parameters
|
1079
937
|
----------
|
1080
|
-
|
1081
|
-
|
938
|
+
packages : Dict[str, str], default: {}
|
939
|
+
Packages to use for this step. The key is the name of the package
|
940
|
+
and the value is the version to use.
|
941
|
+
python : str, optional, default: None
|
942
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
943
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
1082
944
|
"""
|
1083
945
|
...
|
1084
946
|
|
1085
947
|
@typing.overload
|
1086
|
-
def
|
948
|
+
def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
1087
949
|
...
|
1088
950
|
|
1089
951
|
@typing.overload
|
1090
|
-
def
|
952
|
+
def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
1091
953
|
...
|
1092
954
|
|
1093
|
-
def
|
955
|
+
def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
|
1094
956
|
"""
|
1095
|
-
Specifies
|
1096
|
-
|
957
|
+
Specifies the PyPI packages for the step.
|
958
|
+
|
959
|
+
Information in this decorator will augment any
|
960
|
+
attributes set in the `@pypi_base` flow-level decorator. Hence,
|
961
|
+
you can use `@pypi_base` to set packages required by all
|
962
|
+
steps and use `@pypi` to specify step-specific overrides.
|
1097
963
|
|
1098
964
|
Parameters
|
1099
965
|
----------
|
1100
|
-
|
1101
|
-
|
966
|
+
packages : Dict[str, str], default: {}
|
967
|
+
Packages to use for this step. The key is the name of the package
|
968
|
+
and the value is the version to use.
|
969
|
+
python : str, optional, default: None
|
970
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
971
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
1102
972
|
"""
|
1103
973
|
...
|
1104
974
|
|
@@ -1160,7 +1030,58 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
|
|
1160
1030
|
...
|
1161
1031
|
|
1162
1032
|
@typing.overload
|
1163
|
-
def
|
1033
|
+
def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
1034
|
+
"""
|
1035
|
+
Creates a human-readable report, a Metaflow Card, after this step completes.
|
1036
|
+
|
1037
|
+
Note that you may add multiple `@card` decorators in a step with different parameters.
|
1038
|
+
|
1039
|
+
Parameters
|
1040
|
+
----------
|
1041
|
+
type : str, default 'default'
|
1042
|
+
Card type.
|
1043
|
+
id : str, optional, default None
|
1044
|
+
If multiple cards are present, use this id to identify this card.
|
1045
|
+
options : Dict[str, Any], default {}
|
1046
|
+
Options passed to the card. The contents depend on the card type.
|
1047
|
+
timeout : int, default 45
|
1048
|
+
Interrupt reporting if it takes more than this many seconds.
|
1049
|
+
|
1050
|
+
|
1051
|
+
"""
|
1052
|
+
...
|
1053
|
+
|
1054
|
+
@typing.overload
|
1055
|
+
def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
1056
|
+
...
|
1057
|
+
|
1058
|
+
@typing.overload
|
1059
|
+
def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
1060
|
+
...
|
1061
|
+
|
1062
|
+
def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
|
1063
|
+
"""
|
1064
|
+
Creates a human-readable report, a Metaflow Card, after this step completes.
|
1065
|
+
|
1066
|
+
Note that you may add multiple `@card` decorators in a step with different parameters.
|
1067
|
+
|
1068
|
+
Parameters
|
1069
|
+
----------
|
1070
|
+
type : str, default 'default'
|
1071
|
+
Card type.
|
1072
|
+
id : str, optional, default None
|
1073
|
+
If multiple cards are present, use this id to identify this card.
|
1074
|
+
options : Dict[str, Any], default {}
|
1075
|
+
Options passed to the card. The contents depend on the card type.
|
1076
|
+
timeout : int, default 45
|
1077
|
+
Interrupt reporting if it takes more than this many seconds.
|
1078
|
+
|
1079
|
+
|
1080
|
+
"""
|
1081
|
+
...
|
1082
|
+
|
1083
|
+
@typing.overload
|
1084
|
+
def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
1164
1085
|
"""
|
1165
1086
|
Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
|
1166
1087
|
|
@@ -1201,7 +1122,8 @@ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optio
|
|
1201
1122
|
necessary. A swappiness value of 100 causes pages to be swapped very
|
1202
1123
|
aggressively. Accepted values are whole numbers between 0 and 100.
|
1203
1124
|
use_tmpfs : bool, default False
|
1204
|
-
This enables an explicit tmpfs mount for this step.
|
1125
|
+
This enables an explicit tmpfs mount for this step. Note that tmpfs is
|
1126
|
+
not available on Fargate compute environments
|
1205
1127
|
tmpfs_tempdir : bool, default True
|
1206
1128
|
sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
|
1207
1129
|
tmpfs_size : int, optional, default None
|
@@ -1212,8 +1134,13 @@ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optio
|
|
1212
1134
|
Path to tmpfs mount for this step. Defaults to /metaflow_temp.
|
1213
1135
|
inferentia : int, default 0
|
1214
1136
|
Number of Inferentia chips required for this step.
|
1137
|
+
trainium : int, default None
|
1138
|
+
Alias for inferentia. Use only one of the two.
|
1215
1139
|
efa : int, default 0
|
1216
1140
|
Number of elastic fabric adapter network devices to attach to container
|
1141
|
+
ephemeral_storage: int, default None
|
1142
|
+
The total amount, in GiB, of ephemeral storage to set for the task (21-200)
|
1143
|
+
This is only relevant for Fargate compute environments
|
1217
1144
|
"""
|
1218
1145
|
...
|
1219
1146
|
|
@@ -1225,7 +1152,7 @@ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Calla
|
|
1225
1152
|
def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
1226
1153
|
...
|
1227
1154
|
|
1228
|
-
def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, efa: int = 0):
|
1155
|
+
def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None):
|
1229
1156
|
"""
|
1230
1157
|
Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
|
1231
1158
|
|
@@ -1266,7 +1193,8 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
|
|
1266
1193
|
necessary. A swappiness value of 100 causes pages to be swapped very
|
1267
1194
|
aggressively. Accepted values are whole numbers between 0 and 100.
|
1268
1195
|
use_tmpfs : bool, default False
|
1269
|
-
This enables an explicit tmpfs mount for this step.
|
1196
|
+
This enables an explicit tmpfs mount for this step. Note that tmpfs is
|
1197
|
+
not available on Fargate compute environments
|
1270
1198
|
tmpfs_tempdir : bool, default True
|
1271
1199
|
sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
|
1272
1200
|
tmpfs_size : int, optional, default None
|
@@ -1277,90 +1205,283 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
|
|
1277
1205
|
Path to tmpfs mount for this step. Defaults to /metaflow_temp.
|
1278
1206
|
inferentia : int, default 0
|
1279
1207
|
Number of Inferentia chips required for this step.
|
1208
|
+
trainium : int, default None
|
1209
|
+
Alias for inferentia. Use only one of the two.
|
1280
1210
|
efa : int, default 0
|
1281
1211
|
Number of elastic fabric adapter network devices to attach to container
|
1212
|
+
ephemeral_storage: int, default None
|
1213
|
+
The total amount, in GiB, of ephemeral storage to set for the task (21-200)
|
1214
|
+
This is only relevant for Fargate compute environments
|
1282
1215
|
"""
|
1283
1216
|
...
|
1284
1217
|
|
1285
1218
|
@typing.overload
|
1286
|
-
def
|
1219
|
+
def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
1287
1220
|
"""
|
1288
|
-
Specifies
|
1221
|
+
Specifies that the step will success under all circumstances.
|
1222
|
+
|
1223
|
+
The decorator will create an optional artifact, specified by `var`, which
|
1224
|
+
contains the exception raised. You can use it to detect the presence
|
1225
|
+
of errors, indicating that all happy-path artifacts produced by the step
|
1226
|
+
are missing.
|
1289
1227
|
|
1290
1228
|
Parameters
|
1291
1229
|
----------
|
1292
|
-
|
1293
|
-
|
1230
|
+
var : str, optional, default None
|
1231
|
+
Name of the artifact in which to store the caught exception.
|
1232
|
+
If not specified, the exception is not stored.
|
1233
|
+
print_exception : bool, default True
|
1234
|
+
Determines whether or not the exception is printed to
|
1235
|
+
stdout when caught.
|
1294
1236
|
"""
|
1295
1237
|
...
|
1296
1238
|
|
1297
1239
|
@typing.overload
|
1298
|
-
def
|
1240
|
+
def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
1299
1241
|
...
|
1300
1242
|
|
1301
1243
|
@typing.overload
|
1302
|
-
def
|
1244
|
+
def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
1303
1245
|
...
|
1304
1246
|
|
1305
|
-
def
|
1247
|
+
def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
|
1306
1248
|
"""
|
1307
|
-
Specifies
|
1249
|
+
Specifies that the step will success under all circumstances.
|
1250
|
+
|
1251
|
+
The decorator will create an optional artifact, specified by `var`, which
|
1252
|
+
contains the exception raised. You can use it to detect the presence
|
1253
|
+
of errors, indicating that all happy-path artifacts produced by the step
|
1254
|
+
are missing.
|
1308
1255
|
|
1309
1256
|
Parameters
|
1310
1257
|
----------
|
1311
|
-
|
1312
|
-
|
1258
|
+
var : str, optional, default None
|
1259
|
+
Name of the artifact in which to store the caught exception.
|
1260
|
+
If not specified, the exception is not stored.
|
1261
|
+
print_exception : bool, default True
|
1262
|
+
Determines whether or not the exception is printed to
|
1263
|
+
stdout when caught.
|
1264
|
+
"""
|
1265
|
+
...
|
1266
|
+
|
1267
|
+
def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
1268
|
+
"""
|
1269
|
+
Specifies that this step should execute on Kubernetes.
|
1270
|
+
|
1271
|
+
Parameters
|
1272
|
+
----------
|
1273
|
+
cpu : int, default 1
|
1274
|
+
Number of CPUs required for this step. If `@resources` is
|
1275
|
+
also present, the maximum value from all decorators is used.
|
1276
|
+
memory : int, default 4096
|
1277
|
+
Memory size (in MB) required for this step. If
|
1278
|
+
`@resources` is also present, the maximum value from all decorators is
|
1279
|
+
used.
|
1280
|
+
disk : int, default 10240
|
1281
|
+
Disk size (in MB) required for this step. If
|
1282
|
+
`@resources` is also present, the maximum value from all decorators is
|
1283
|
+
used.
|
1284
|
+
image : str, optional, default None
|
1285
|
+
Docker image to use when launching on Kubernetes. If not specified, and
|
1286
|
+
METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
|
1287
|
+
not, a default Docker image mapping to the current version of Python is used.
|
1288
|
+
image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
|
1289
|
+
If given, the imagePullPolicy to be applied to the Docker image of the step.
|
1290
|
+
service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
|
1291
|
+
Kubernetes service account to use when launching pod in Kubernetes.
|
1292
|
+
secrets : List[str], optional, default None
|
1293
|
+
Kubernetes secrets to use when launching pod in Kubernetes. These
|
1294
|
+
secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
|
1295
|
+
in Metaflow configuration.
|
1296
|
+
namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
|
1297
|
+
Kubernetes namespace to use when launching pod in Kubernetes.
|
1298
|
+
gpu : int, optional, default None
|
1299
|
+
Number of GPUs required for this step. A value of zero implies that
|
1300
|
+
the scheduled node should not have GPUs.
|
1301
|
+
gpu_vendor : str, default KUBERNETES_GPU_VENDOR
|
1302
|
+
The vendor of the GPUs to be used for this step.
|
1303
|
+
tolerations : List[str], default []
|
1304
|
+
The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
|
1305
|
+
Kubernetes tolerations to use when launching pod in Kubernetes.
|
1306
|
+
use_tmpfs : bool, default False
|
1307
|
+
This enables an explicit tmpfs mount for this step.
|
1308
|
+
tmpfs_tempdir : bool, default True
|
1309
|
+
sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
|
1310
|
+
tmpfs_size : int, optional, default: None
|
1311
|
+
The value for the size (in MiB) of the tmpfs mount for this step.
|
1312
|
+
This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
|
1313
|
+
memory allocated for this step.
|
1314
|
+
tmpfs_path : str, optional, default /metaflow_temp
|
1315
|
+
Path to tmpfs mount for this step.
|
1316
|
+
persistent_volume_claims : Dict[str, str], optional, default None
|
1317
|
+
A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
|
1318
|
+
volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
|
1319
|
+
shared_memory: int, optional
|
1320
|
+
Shared memory size (in MiB) required for this step
|
1313
1321
|
"""
|
1314
1322
|
...
|
1315
1323
|
|
1316
1324
|
@typing.overload
|
1317
|
-
def
|
1325
|
+
def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
|
1318
1326
|
"""
|
1319
|
-
Specifies the
|
1327
|
+
Specifies the number of times the task corresponding
|
1328
|
+
to a step needs to be retried.
|
1320
1329
|
|
1321
|
-
|
1322
|
-
|
1323
|
-
|
1324
|
-
|
1330
|
+
This decorator is useful for handling transient errors, such as networking issues.
|
1331
|
+
If your task contains operations that can't be retried safely, e.g. database updates,
|
1332
|
+
it is advisable to annotate it with `@retry(times=0)`.
|
1333
|
+
|
1334
|
+
This can be used in conjunction with the `@catch` decorator. The `@catch`
|
1335
|
+
decorator will execute a no-op task after all retries have been exhausted,
|
1336
|
+
ensuring that the flow execution can continue.
|
1325
1337
|
|
1326
1338
|
Parameters
|
1327
1339
|
----------
|
1328
|
-
|
1329
|
-
|
1330
|
-
|
1331
|
-
|
1332
|
-
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
1333
|
-
that the version used will correspond to the version of the Python interpreter used to start the run.
|
1340
|
+
times : int, default 3
|
1341
|
+
Number of times to retry this task.
|
1342
|
+
minutes_between_retries : int, default 2
|
1343
|
+
Number of minutes between retries.
|
1334
1344
|
"""
|
1335
1345
|
...
|
1336
1346
|
|
1337
1347
|
@typing.overload
|
1338
|
-
def
|
1348
|
+
def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
1339
1349
|
...
|
1340
1350
|
|
1341
1351
|
@typing.overload
|
1342
|
-
def
|
1352
|
+
def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
1343
1353
|
...
|
1344
1354
|
|
1345
|
-
def
|
1355
|
+
def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
|
1346
1356
|
"""
|
1347
|
-
Specifies the
|
1357
|
+
Specifies the number of times the task corresponding
|
1358
|
+
to a step needs to be retried.
|
1348
1359
|
|
1349
|
-
|
1350
|
-
|
1351
|
-
|
1352
|
-
|
1360
|
+
This decorator is useful for handling transient errors, such as networking issues.
|
1361
|
+
If your task contains operations that can't be retried safely, e.g. database updates,
|
1362
|
+
it is advisable to annotate it with `@retry(times=0)`.
|
1363
|
+
|
1364
|
+
This can be used in conjunction with the `@catch` decorator. The `@catch`
|
1365
|
+
decorator will execute a no-op task after all retries have been exhausted,
|
1366
|
+
ensuring that the flow execution can continue.
|
1353
1367
|
|
1354
1368
|
Parameters
|
1355
1369
|
----------
|
1356
|
-
|
1357
|
-
|
1358
|
-
|
1359
|
-
|
1360
|
-
|
1361
|
-
|
1362
|
-
|
1363
|
-
|
1370
|
+
times : int, default 3
|
1371
|
+
Number of times to retry this task.
|
1372
|
+
minutes_between_retries : int, default 2
|
1373
|
+
Number of minutes between retries.
|
1374
|
+
"""
|
1375
|
+
...
|
1376
|
+
|
1377
|
+
def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
1378
|
+
"""
|
1379
|
+
Specifies what flows belong to the same project.
|
1380
|
+
|
1381
|
+
A project-specific namespace is created for all flows that
|
1382
|
+
use the same `@project(name)`.
|
1383
|
+
|
1384
|
+
Parameters
|
1385
|
+
----------
|
1386
|
+
name : str
|
1387
|
+
Project name. Make sure that the name is unique amongst all
|
1388
|
+
projects that use the same production scheduler. The name may
|
1389
|
+
contain only lowercase alphanumeric characters and underscores.
|
1390
|
+
|
1391
|
+
|
1392
|
+
"""
|
1393
|
+
...
|
1394
|
+
|
1395
|
+
def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
1396
|
+
"""
|
1397
|
+
The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
|
1398
|
+
This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
|
1399
|
+
|
1400
|
+
Parameters
|
1401
|
+
----------
|
1402
|
+
timeout : int
|
1403
|
+
Time, in seconds before the task times out and fails. (Default: 3600)
|
1404
|
+
poke_interval : int
|
1405
|
+
Time in seconds that the job should wait in between each try. (Default: 60)
|
1406
|
+
mode : str
|
1407
|
+
How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
|
1408
|
+
exponential_backoff : bool
|
1409
|
+
allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
|
1410
|
+
pool : str
|
1411
|
+
the slot pool this task should run in,
|
1412
|
+
slot pools are a way to limit concurrency for certain tasks. (Default:None)
|
1413
|
+
soft_fail : bool
|
1414
|
+
Set to true to mark the task as SKIPPED on failure. (Default: False)
|
1415
|
+
name : str
|
1416
|
+
Name of the sensor on Airflow
|
1417
|
+
description : str
|
1418
|
+
Description of sensor in the Airflow UI
|
1419
|
+
external_dag_id : str
|
1420
|
+
The dag_id that contains the task you want to wait for.
|
1421
|
+
external_task_ids : List[str]
|
1422
|
+
The list of task_ids that you want to wait for.
|
1423
|
+
If None (default value) the sensor waits for the DAG. (Default: None)
|
1424
|
+
allowed_states : List[str]
|
1425
|
+
Iterable of allowed states, (Default: ['success'])
|
1426
|
+
failed_states : List[str]
|
1427
|
+
Iterable of failed or dis-allowed states. (Default: None)
|
1428
|
+
execution_delta : datetime.timedelta
|
1429
|
+
time difference with the previous execution to look at,
|
1430
|
+
the default is the same logical date as the current task or DAG. (Default: None)
|
1431
|
+
check_existence: bool
|
1432
|
+
Set to True to check if the external task exists or check if
|
1433
|
+
the DAG to wait for exists. (Default: True)
|
1434
|
+
"""
|
1435
|
+
...
|
1436
|
+
|
1437
|
+
@typing.overload
|
1438
|
+
def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
1439
|
+
"""
|
1440
|
+
Specifies the Conda environment for all steps of the flow.
|
1441
|
+
|
1442
|
+
Use `@conda_base` to set common libraries required by all
|
1443
|
+
steps and use `@conda` to specify step-specific additions.
|
1444
|
+
|
1445
|
+
Parameters
|
1446
|
+
----------
|
1447
|
+
packages : Dict[str, str], default {}
|
1448
|
+
Packages to use for this flow. The key is the name of the package
|
1449
|
+
and the value is the version to use.
|
1450
|
+
libraries : Dict[str, str], default {}
|
1451
|
+
Supported for backward compatibility. When used with packages, packages will take precedence.
|
1452
|
+
python : str, optional, default None
|
1453
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
1454
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
1455
|
+
disabled : bool, default False
|
1456
|
+
If set to True, disables Conda.
|
1457
|
+
"""
|
1458
|
+
...
|
1459
|
+
|
1460
|
+
@typing.overload
|
1461
|
+
def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
|
1462
|
+
...
|
1463
|
+
|
1464
|
+
def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
|
1465
|
+
"""
|
1466
|
+
Specifies the Conda environment for all steps of the flow.
|
1467
|
+
|
1468
|
+
Use `@conda_base` to set common libraries required by all
|
1469
|
+
steps and use `@conda` to specify step-specific additions.
|
1470
|
+
|
1471
|
+
Parameters
|
1472
|
+
----------
|
1473
|
+
packages : Dict[str, str], default {}
|
1474
|
+
Packages to use for this flow. The key is the name of the package
|
1475
|
+
and the value is the version to use.
|
1476
|
+
libraries : Dict[str, str], default {}
|
1477
|
+
Supported for backward compatibility. When used with packages, packages will take precedence.
|
1478
|
+
python : str, optional, default None
|
1479
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
1480
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
1481
|
+
disabled : bool, default False
|
1482
|
+
If set to True, disables Conda.
|
1483
|
+
"""
|
1484
|
+
...
|
1364
1485
|
|
1365
1486
|
@typing.overload
|
1366
1487
|
def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
@@ -1411,67 +1532,45 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
|
|
1411
1532
|
"""
|
1412
1533
|
...
|
1413
1534
|
|
1414
|
-
def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
1415
|
-
"""
|
1416
|
-
Specifies what flows belong to the same project.
|
1417
|
-
|
1418
|
-
A project-specific namespace is created for all flows that
|
1419
|
-
use the same `@project(name)`.
|
1420
|
-
|
1421
|
-
Parameters
|
1422
|
-
----------
|
1423
|
-
name : str
|
1424
|
-
Project name. Make sure that the name is unique amongst all
|
1425
|
-
projects that use the same production scheduler. The name may
|
1426
|
-
contain only lowercase alphanumeric characters and underscores.
|
1427
|
-
|
1428
|
-
|
1429
|
-
"""
|
1430
|
-
...
|
1431
|
-
|
1432
1535
|
@typing.overload
|
1433
|
-
def
|
1536
|
+
def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
1434
1537
|
"""
|
1435
|
-
Specifies the
|
1538
|
+
Specifies the event(s) that this flow depends on.
|
1436
1539
|
|
1437
1540
|
```
|
1438
|
-
@
|
1541
|
+
@trigger(event='foo')
|
1439
1542
|
```
|
1440
1543
|
or
|
1441
1544
|
```
|
1442
|
-
@
|
1545
|
+
@trigger(events=['foo', 'bar'])
|
1443
1546
|
```
|
1444
|
-
This decorator respects the @project decorator and triggers the flow
|
1445
|
-
when upstream runs within the same namespace complete successfully
|
1446
1547
|
|
1447
|
-
Additionally, you can specify
|
1448
|
-
|
1548
|
+
Additionally, you can specify the parameter mappings
|
1549
|
+
to map event payload to Metaflow parameters for the flow.
|
1449
1550
|
```
|
1450
|
-
@
|
1551
|
+
@trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
|
1451
1552
|
```
|
1452
1553
|
or
|
1453
1554
|
```
|
1454
|
-
@
|
1555
|
+
@trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
|
1556
|
+
{'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
|
1455
1557
|
```
|
1456
1558
|
|
1457
|
-
|
1458
|
-
inferred from the current project or project branch):
|
1559
|
+
'parameters' can also be a list of strings and tuples like so:
|
1459
1560
|
```
|
1460
|
-
@
|
1561
|
+
@trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
|
1562
|
+
```
|
1563
|
+
This is equivalent to:
|
1564
|
+
```
|
1565
|
+
@trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
|
1461
1566
|
```
|
1462
|
-
|
1463
|
-
Note that `branch` is typically one of:
|
1464
|
-
- `prod`
|
1465
|
-
- `user.bob`
|
1466
|
-
- `test.my_experiment`
|
1467
|
-
- `prod.staging`
|
1468
1567
|
|
1469
1568
|
Parameters
|
1470
1569
|
----------
|
1471
|
-
|
1472
|
-
|
1473
|
-
|
1474
|
-
|
1570
|
+
event : Union[str, Dict[str, Any]], optional, default None
|
1571
|
+
Event dependency for this flow.
|
1572
|
+
events : List[Union[str, Dict[str, Any]]], default []
|
1573
|
+
Events dependency for this flow.
|
1475
1574
|
options : Dict[str, Any], default {}
|
1476
1575
|
Backend-specific configuration for tuning eventing behavior.
|
1477
1576
|
|
@@ -1480,51 +1579,47 @@ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] =
|
|
1480
1579
|
...
|
1481
1580
|
|
1482
1581
|
@typing.overload
|
1483
|
-
def
|
1582
|
+
def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
|
1484
1583
|
...
|
1485
1584
|
|
1486
|
-
def
|
1585
|
+
def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
|
1487
1586
|
"""
|
1488
|
-
Specifies the
|
1587
|
+
Specifies the event(s) that this flow depends on.
|
1489
1588
|
|
1490
1589
|
```
|
1491
|
-
@
|
1590
|
+
@trigger(event='foo')
|
1492
1591
|
```
|
1493
1592
|
or
|
1494
1593
|
```
|
1495
|
-
@
|
1594
|
+
@trigger(events=['foo', 'bar'])
|
1496
1595
|
```
|
1497
|
-
This decorator respects the @project decorator and triggers the flow
|
1498
|
-
when upstream runs within the same namespace complete successfully
|
1499
1596
|
|
1500
|
-
Additionally, you can specify
|
1501
|
-
|
1597
|
+
Additionally, you can specify the parameter mappings
|
1598
|
+
to map event payload to Metaflow parameters for the flow.
|
1502
1599
|
```
|
1503
|
-
@
|
1600
|
+
@trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
|
1504
1601
|
```
|
1505
1602
|
or
|
1506
1603
|
```
|
1507
|
-
@
|
1604
|
+
@trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
|
1605
|
+
{'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
|
1508
1606
|
```
|
1509
1607
|
|
1510
|
-
|
1511
|
-
inferred from the current project or project branch):
|
1608
|
+
'parameters' can also be a list of strings and tuples like so:
|
1512
1609
|
```
|
1513
|
-
@
|
1610
|
+
@trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
|
1611
|
+
```
|
1612
|
+
This is equivalent to:
|
1613
|
+
```
|
1614
|
+
@trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
|
1514
1615
|
```
|
1515
|
-
|
1516
|
-
Note that `branch` is typically one of:
|
1517
|
-
- `prod`
|
1518
|
-
- `user.bob`
|
1519
|
-
- `test.my_experiment`
|
1520
|
-
- `prod.staging`
|
1521
1616
|
|
1522
1617
|
Parameters
|
1523
1618
|
----------
|
1524
|
-
|
1525
|
-
|
1526
|
-
|
1527
|
-
|
1619
|
+
event : Union[str, Dict[str, Any]], optional, default None
|
1620
|
+
Event dependency for this flow.
|
1621
|
+
events : List[Union[str, Dict[str, Any]]], default []
|
1622
|
+
Events dependency for this flow.
|
1528
1623
|
options : Dict[str, Any], default {}
|
1529
1624
|
Backend-specific configuration for tuning eventing behavior.
|
1530
1625
|
|
@@ -1532,48 +1627,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
|
|
1532
1627
|
"""
|
1533
1628
|
...
|
1534
1629
|
|
1535
|
-
def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
1536
|
-
"""
|
1537
|
-
The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
|
1538
|
-
This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
|
1539
|
-
|
1540
|
-
Parameters
|
1541
|
-
----------
|
1542
|
-
timeout : int
|
1543
|
-
Time, in seconds before the task times out and fails. (Default: 3600)
|
1544
|
-
poke_interval : int
|
1545
|
-
Time in seconds that the job should wait in between each try. (Default: 60)
|
1546
|
-
mode : str
|
1547
|
-
How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
|
1548
|
-
exponential_backoff : bool
|
1549
|
-
allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
|
1550
|
-
pool : str
|
1551
|
-
the slot pool this task should run in,
|
1552
|
-
slot pools are a way to limit concurrency for certain tasks. (Default:None)
|
1553
|
-
soft_fail : bool
|
1554
|
-
Set to true to mark the task as SKIPPED on failure. (Default: False)
|
1555
|
-
name : str
|
1556
|
-
Name of the sensor on Airflow
|
1557
|
-
description : str
|
1558
|
-
Description of sensor in the Airflow UI
|
1559
|
-
external_dag_id : str
|
1560
|
-
The dag_id that contains the task you want to wait for.
|
1561
|
-
external_task_ids : List[str]
|
1562
|
-
The list of task_ids that you want to wait for.
|
1563
|
-
If None (default value) the sensor waits for the DAG. (Default: None)
|
1564
|
-
allowed_states : List[str]
|
1565
|
-
Iterable of allowed states, (Default: ['success'])
|
1566
|
-
failed_states : List[str]
|
1567
|
-
Iterable of failed or dis-allowed states. (Default: None)
|
1568
|
-
execution_delta : datetime.timedelta
|
1569
|
-
time difference with the previous execution to look at,
|
1570
|
-
the default is the same logical date as the current task or DAG. (Default: None)
|
1571
|
-
check_existence: bool
|
1572
|
-
Set to True to check if the external task exists or check if
|
1573
|
-
the DAG to wait for exists. (Default: True)
|
1574
|
-
"""
|
1575
|
-
...
|
1576
|
-
|
1577
1630
|
@typing.overload
|
1578
1631
|
def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
1579
1632
|
"""
|
@@ -1656,93 +1709,48 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
|
|
1656
1709
|
...
|
1657
1710
|
|
1658
1711
|
@typing.overload
|
1659
|
-
def
|
1660
|
-
"""
|
1661
|
-
Specifies the Conda environment for all steps of the flow.
|
1662
|
-
|
1663
|
-
Use `@conda_base` to set common libraries required by all
|
1664
|
-
steps and use `@conda` to specify step-specific additions.
|
1665
|
-
|
1666
|
-
Parameters
|
1667
|
-
----------
|
1668
|
-
packages : Dict[str, str], default {}
|
1669
|
-
Packages to use for this flow. The key is the name of the package
|
1670
|
-
and the value is the version to use.
|
1671
|
-
libraries : Dict[str, str], default {}
|
1672
|
-
Supported for backward compatibility. When used with packages, packages will take precedence.
|
1673
|
-
python : str, optional, default None
|
1674
|
-
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
1675
|
-
that the version used will correspond to the version of the Python interpreter used to start the run.
|
1676
|
-
disabled : bool, default False
|
1677
|
-
If set to True, disables Conda.
|
1678
|
-
"""
|
1679
|
-
...
|
1680
|
-
|
1681
|
-
@typing.overload
|
1682
|
-
def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
|
1683
|
-
...
|
1684
|
-
|
1685
|
-
def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
|
1686
|
-
"""
|
1687
|
-
Specifies the Conda environment for all steps of the flow.
|
1688
|
-
|
1689
|
-
Use `@conda_base` to set common libraries required by all
|
1690
|
-
steps and use `@conda` to specify step-specific additions.
|
1691
|
-
|
1692
|
-
Parameters
|
1693
|
-
----------
|
1694
|
-
packages : Dict[str, str], default {}
|
1695
|
-
Packages to use for this flow. The key is the name of the package
|
1696
|
-
and the value is the version to use.
|
1697
|
-
libraries : Dict[str, str], default {}
|
1698
|
-
Supported for backward compatibility. When used with packages, packages will take precedence.
|
1699
|
-
python : str, optional, default None
|
1700
|
-
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
1701
|
-
that the version used will correspond to the version of the Python interpreter used to start the run.
|
1702
|
-
disabled : bool, default False
|
1703
|
-
If set to True, disables Conda.
|
1704
|
-
"""
|
1705
|
-
...
|
1706
|
-
|
1707
|
-
@typing.overload
|
1708
|
-
def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
1712
|
+
def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
|
1709
1713
|
"""
|
1710
|
-
Specifies the
|
1714
|
+
Specifies the flow(s) that this flow depends on.
|
1711
1715
|
|
1712
1716
|
```
|
1713
|
-
@
|
1717
|
+
@trigger_on_finish(flow='FooFlow')
|
1714
1718
|
```
|
1715
1719
|
or
|
1716
1720
|
```
|
1717
|
-
@
|
1721
|
+
@trigger_on_finish(flows=['FooFlow', 'BarFlow'])
|
1718
1722
|
```
|
1723
|
+
This decorator respects the @project decorator and triggers the flow
|
1724
|
+
when upstream runs within the same namespace complete successfully
|
1719
1725
|
|
1720
|
-
Additionally, you can specify
|
1721
|
-
|
1726
|
+
Additionally, you can specify project aware upstream flow dependencies
|
1727
|
+
by specifying the fully qualified project_flow_name.
|
1722
1728
|
```
|
1723
|
-
@
|
1729
|
+
@trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
|
1724
1730
|
```
|
1725
1731
|
or
|
1726
1732
|
```
|
1727
|
-
@
|
1728
|
-
{'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
|
1733
|
+
@trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
|
1729
1734
|
```
|
1730
1735
|
|
1731
|
-
|
1732
|
-
|
1733
|
-
@trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
|
1734
|
-
```
|
1735
|
-
This is equivalent to:
|
1736
|
+
You can also specify just the project or project branch (other values will be
|
1737
|
+
inferred from the current project or project branch):
|
1736
1738
|
```
|
1737
|
-
@
|
1739
|
+
@trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
|
1738
1740
|
```
|
1739
1741
|
|
1742
|
+
Note that `branch` is typically one of:
|
1743
|
+
- `prod`
|
1744
|
+
- `user.bob`
|
1745
|
+
- `test.my_experiment`
|
1746
|
+
- `prod.staging`
|
1747
|
+
|
1740
1748
|
Parameters
|
1741
1749
|
----------
|
1742
|
-
|
1743
|
-
|
1744
|
-
|
1745
|
-
|
1750
|
+
flow : Union[str, Dict[str, str]], optional, default None
|
1751
|
+
Upstream flow dependency for this flow.
|
1752
|
+
flows : List[Union[str, Dict[str, str]]], default []
|
1753
|
+
Upstream flow dependencies for this flow.
|
1746
1754
|
options : Dict[str, Any], default {}
|
1747
1755
|
Backend-specific configuration for tuning eventing behavior.
|
1748
1756
|
|
@@ -1751,47 +1759,51 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
|
|
1751
1759
|
...
|
1752
1760
|
|
1753
1761
|
@typing.overload
|
1754
|
-
def
|
1762
|
+
def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
|
1755
1763
|
...
|
1756
1764
|
|
1757
|
-
def
|
1765
|
+
def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
|
1758
1766
|
"""
|
1759
|
-
Specifies the
|
1767
|
+
Specifies the flow(s) that this flow depends on.
|
1760
1768
|
|
1761
1769
|
```
|
1762
|
-
@
|
1770
|
+
@trigger_on_finish(flow='FooFlow')
|
1763
1771
|
```
|
1764
1772
|
or
|
1765
1773
|
```
|
1766
|
-
@
|
1774
|
+
@trigger_on_finish(flows=['FooFlow', 'BarFlow'])
|
1767
1775
|
```
|
1776
|
+
This decorator respects the @project decorator and triggers the flow
|
1777
|
+
when upstream runs within the same namespace complete successfully
|
1768
1778
|
|
1769
|
-
Additionally, you can specify
|
1770
|
-
|
1779
|
+
Additionally, you can specify project aware upstream flow dependencies
|
1780
|
+
by specifying the fully qualified project_flow_name.
|
1771
1781
|
```
|
1772
|
-
@
|
1782
|
+
@trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
|
1773
1783
|
```
|
1774
1784
|
or
|
1775
1785
|
```
|
1776
|
-
@
|
1777
|
-
{'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
|
1786
|
+
@trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
|
1778
1787
|
```
|
1779
1788
|
|
1780
|
-
|
1781
|
-
|
1782
|
-
@trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
|
1783
|
-
```
|
1784
|
-
This is equivalent to:
|
1789
|
+
You can also specify just the project or project branch (other values will be
|
1790
|
+
inferred from the current project or project branch):
|
1785
1791
|
```
|
1786
|
-
@
|
1792
|
+
@trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
|
1787
1793
|
```
|
1788
1794
|
|
1795
|
+
Note that `branch` is typically one of:
|
1796
|
+
- `prod`
|
1797
|
+
- `user.bob`
|
1798
|
+
- `test.my_experiment`
|
1799
|
+
- `prod.staging`
|
1800
|
+
|
1789
1801
|
Parameters
|
1790
1802
|
----------
|
1791
|
-
|
1792
|
-
|
1793
|
-
|
1794
|
-
|
1803
|
+
flow : Union[str, Dict[str, str]], optional, default None
|
1804
|
+
Upstream flow dependency for this flow.
|
1805
|
+
flows : List[Union[str, Dict[str, str]]], default []
|
1806
|
+
Upstream flow dependencies for this flow.
|
1795
1807
|
options : Dict[str, Any], default {}
|
1796
1808
|
Backend-specific configuration for tuning eventing behavior.
|
1797
1809
|
|