metaflow-stubs 2.12.14__py2.py3-none-any.whl → 2.12.16__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +424 -424
- metaflow-stubs/cards.pyi +4 -4
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +3 -3
- metaflow-stubs/client/core.pyi +7 -7
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/clone_util.pyi +2 -2
- metaflow-stubs/events.pyi +2 -2
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +4 -4
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +5 -5
- metaflow-stubs/metadata/metadata.pyi +3 -3
- metaflow-stubs/metadata/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +17 -17
- metaflow-stubs/mflog/mflog.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +3 -3
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -5
- metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +9 -7
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +7 -7
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +3 -3
- metaflow-stubs/plugins/datatools/__init__.pyi +4 -4
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
- metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +3 -3
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/logs_cli.pyi +2 -2
- metaflow-stubs/plugins/package_cli.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +3 -3
- metaflow-stubs/plugins/project_decorator.pyi +3 -3
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/tag_cli.pyi +5 -5
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/procpoll.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +3 -3
- metaflow-stubs/runner/metaflow_runner.pyi +5 -5
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +2 -2
- metaflow-stubs/system/__init__.pyi +3 -3
- metaflow-stubs/system/system_logger.pyi +3 -3
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.12.14.dist-info → metaflow_stubs-2.12.16.dist-info}/METADATA +2 -2
- metaflow_stubs-2.12.16.dist-info/RECORD +150 -0
- metaflow_stubs-2.12.14.dist-info/RECORD +0 -150
- {metaflow_stubs-2.12.14.dist-info → metaflow_stubs-2.12.16.dist-info}/WHEEL +0 -0
- {metaflow_stubs-2.12.14.dist-info → metaflow_stubs-2.12.16.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,25 +1,25 @@
 ##################################################################################
 # Auto-generated Metaflow stub file #
- # MF version: 2.12.
- # Generated on 2024-08-
+ # MF version: 2.12.16 #
+ # Generated on 2024-08-26T21:06:14.574066 #
 ##################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
- import
- import metaflow.
- import metaflow.
+ import typing
+ import metaflow.client.core
+ import metaflow.flowspec
+ import metaflow._vendor.click.types
 import metaflow.parameters
+ import metaflow.metaflow_current
 import io
- import metaflow._vendor.click.types
 import datetime
- import metaflow.flowspec
- import metaflow.runner.metaflow_runner
 import metaflow.datastore.inputs
- import metaflow.
- import
+ import metaflow.events
+ import metaflow.runner.metaflow_runner
+ import metaflow.plugins.datatools.s3.s3
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

@@ -728,136 +728,182 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
 ...

 @typing.overload
- def
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
 """
- Specifies
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

 Parameters
 ----------
 cpu : int, default 1
- Number of CPUs required for this step.
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
 gpu : int, default 0
- Number of GPUs required for this step.
-
- Disk size (in MB) required for this step. Only applies on Kubernetes.
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
 memory : int, default 4096
- Memory size (in MB) required for this step.
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
 shared_memory : int, optional, default None
 The value for the size (in MiB) of the /dev/shm volume for this step.
 This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
 """
 ...

 @typing.overload
- def
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
 ...

 @typing.overload
- def
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
 ...

- def
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
 """
- Specifies
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

 Parameters
 ----------
 cpu : int, default 1
- Number of CPUs required for this step.
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
 gpu : int, default 0
- Number of GPUs required for this step.
-
- Disk size (in MB) required for this step. Only applies on Kubernetes.
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
 memory : int, default 4096
- Memory size (in MB) required for this step.
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
 shared_memory : int, optional, default None
 The value for the size (in MiB) of the /dev/shm volume for this step.
 This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
 """
 ...

 @typing.overload
- def
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
 """
- Specifies
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

 Parameters
 ----------
-
-
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
 """
 ...

 @typing.overload
- def
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
 ...

 @typing.overload
- def
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
 ...

- def
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
 """
- Specifies
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

 Parameters
 ----------
-
-
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
 """
 ...

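The hunk above relocates the `@batch` and `@secrets` stub definitions (with their full docstrings) to this position in the regenerated stubs. For orientation only, a minimal sketch of how these decorators are used together in a flow; it is not part of the package, and the queue configuration and secret name are hypothetical.

```python
# Illustrative sketch only -- not part of metaflow-stubs. Assumes AWS Batch is
# configured for Metaflow and that a secret source named "db-credentials" exists.
from metaflow import FlowSpec, batch, secrets, step


class BatchDemoFlow(FlowSpec):

    @secrets(sources=["db-credentials"])  # injected as environment variables before the step runs
    @batch(cpu=2, memory=8192)            # step executes on AWS Batch with these resources
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    BatchDemoFlow()
```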
@@ -913,239 +959,79 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
 ...

 @typing.overload
- def
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
 """
- Specifies
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the resources needed when executing this step.

-
-
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

-
-
-
-
-
-
-
-
-
-
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

 Parameters
 ----------
 cpu : int, default 1
- Number of CPUs required for this step.
- also present, the maximum value from all decorators is used.
+ Number of CPUs required for this step.
 gpu : int, default 0
- Number of GPUs required for this step.
-
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
 memory : int, default 4096
- Memory size (in MB) required for this step.
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ Memory size (in MB) required for this step.
 shared_memory : int, optional, default None
 The value for the size (in MiB) of the /dev/shm volume for this step.
 This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
 """
 ...

 @typing.overload
- def
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
 ...

 @typing.overload
- def
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
 ...

- def
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
 """
- Specifies
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

 Parameters
 ----------
 cpu : int, default 1
- Number of CPUs required for this step.
- also present, the maximum value from all decorators is used.
+ Number of CPUs required for this step.
 gpu : int, default 0
- Number of GPUs required for this step.
-
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
 memory : int, default 4096
- Memory size (in MB) required for this step.
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ Memory size (in MB) required for this step.
 shared_memory : int, optional, default None
 The value for the size (in MiB) of the /dev/shm volume for this step.
 This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
 """
 ...

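This hunk replaces the old `@timeout`/`@batch`/`@secrets` stubs at this position with the relocated `@resources` stubs. As the docstring notes, `@resources` declares requirements independently of the compute layer; a minimal sketch (illustrative only, flow and file names hypothetical):

```python
# Illustrative sketch only -- not part of metaflow-stubs.
from metaflow import FlowSpec, resources, step


class ResourcesDemoFlow(FlowSpec):

    @resources(cpu=4, memory=16000)  # honored by whichever compute layer runs the step
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ResourcesDemoFlow()
```

Per the docstring, the same flow can run locally with `python resources_demo.py run`, or apply these requirements on a compute layer with `python resources_demo.py run --with batch` (or `--with kubernetes`).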
@@ -1203,33 +1089,59 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
 ...

 @typing.overload
- def
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
 """
- Specifies
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

 Parameters
 ----------
-
-
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
 """
 ...

 @typing.overload
- def
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
 ...

 @typing.overload
- def
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
 ...

- def
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
 """
- Specifies
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

 Parameters
 ----------
-
-
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
 """
 ...

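The relocated `@timeout` stub above documents that `seconds`, `minutes`, and `hours` are added together and that a timeout surfaces as an exception which `@retry` and `@catch` can handle. A minimal sketch combining the three (illustrative only):

```python
# Illustrative sketch only -- not part of metaflow-stubs.
from metaflow import FlowSpec, catch, retry, step, timeout


class TimeoutDemoFlow(FlowSpec):

    @catch(var="timeout_error")  # stores the exception as an artifact if retries are exhausted
    @retry(times=2)              # retry the step before giving up
    @timeout(minutes=30)         # effective limit: 30 minutes
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TimeoutDemoFlow()
```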
@@ -1282,6 +1194,37 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
 """
 ...

+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
 @typing.overload
 def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
 """
@@ -1393,6 +1336,63 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
 """
 ...

+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
 @typing.overload
 def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
 """
@@ -1454,6 +1454,63 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
 """
 ...

+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
+ """
+ ...
+
 @typing.overload
 def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
 """
@@ -1598,115 +1655,6 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
     """
     ...

-def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies what flows belong to the same project.
-
-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
-
-    Parameters
-    ----------
-    name : str
-        Project name. Make sure that the name is unique amongst all
-        projects that use the same production scheduler. The name may
-        contain only lowercase alphanumeric characters and underscores.
-
-
-    """
-    ...
-
-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
-
-    Parameters
-    ----------
-    timeout : int
-        Time, in seconds before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : str
-        the slot pool this task should run in,
-        slot pools are a way to limit concurrency for certain tasks. (Default:None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of sensor in the Airflow UI
-    bucket_key : Union[str, List[str]]
-        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
-        When it's specified as a full s3:// url, please leave `bucket_name` as None
-    bucket_name : str
-        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
-        When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
-    wildcard_match : bool
-        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
-    aws_conn_id : str
-        a reference to the s3 connection on Airflow. (Default: None)
-    verify : bool
-        Whether or not to verify SSL certificates for S3 connection. (Default: None)
-    """
-    ...
-
-@typing.overload
-def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the Conda environment for all steps of the flow.
-
-    Use `@conda_base` to set common libraries required by all
-    steps and use `@conda` to specify step-specific additions.
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this flow. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables Conda.
-    """
-    ...
-
-@typing.overload
-def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
-    """
-    Specifies the Conda environment for all steps of the flow.
-
-    Use `@conda_base` to set common libraries required by all
-    steps and use `@conda` to specify step-specific additions.
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this flow. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables Conda.
-    """
-    ...
-
 @typing.overload
 def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
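The `@trigger_on_finish` signature visible in the context lines above accepts either a single `flow` or a list of `flows`. A minimal, hypothetical sketch of its usual usage follows; the flow names are placeholders, and the dependency only takes effect once the flows are deployed to a production orchestrator.

```python
from metaflow import FlowSpec, step, trigger_on_finish


# "UpstreamFlow" is a placeholder name: once deployed, this flow is
# triggered whenever a run of the named upstream flow finishes.
@trigger_on_finish(flow="UpstreamFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```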
@@ -1810,42 +1758,94 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
     """
     ...

+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.
+
+    Parameters
+    ----------
+    timeout : int
+        Time, in seconds before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+    pool : str
+        the slot pool this task should run in,
+        slot pools are a way to limit concurrency for certain tasks. (Default:None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of sensor in the Airflow UI
+    bucket_key : Union[str, List[str]]
+        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+        When it's specified as a full s3:// url, please leave `bucket_name` as None
+    bucket_name : str
+        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+        When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+    wildcard_match : bool
+        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+    aws_conn_id : str
+        a reference to the s3 connection on Airflow. (Default: None)
+    verify : bool
+        Whether or not to verify SSL certificates for S3 connection. (Default: None)
+    """
+    ...
+
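To make the re-added `@airflow_s3_key_sensor` docstring above concrete, here is a hypothetical sketch of a flow that waits on an S3 key before its `start` step. The bucket, key, and flow name are invented, and the sensor only takes effect when the flow is compiled with `airflow create`.

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


# Placeholder bucket/key. Since bucket_key is a full s3:// URL,
# bucket_name is left unset, as the docstring recommends.
@airflow_s3_key_sensor(
    bucket_key="s3://example-bucket/incoming/data.csv",
    timeout=3600,
    poke_interval=60,
    mode="poke",
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        # On Airflow, this step runs only after the sensor observes the key.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorGatedFlow()
```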
 @typing.overload
-def
+def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
+    Specifies the Conda environment for all steps of the flow.
+
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.

-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
     Parameters
     ----------
-    packages : Dict[str, str], default
+    packages : Dict[str, str], default {}
         Packages to use for this flow. The key is the name of the package
         and the value is the version to use.
-
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
         Version of Python to use, e.g. '3.7.4'. A default value of None implies
         that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables Conda.
     """
     ...

 @typing.overload
-def
+def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies the
+    Specifies the Conda environment for all steps of the flow.
+
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.

-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
     Parameters
     ----------
-    packages : Dict[str, str], default
+    packages : Dict[str, str], default {}
         Packages to use for this flow. The key is the name of the package
         and the value is the version to use.
-
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
         Version of Python to use, e.g. '3.7.4'. A default value of None implies
         that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables Conda.
     """
     ...

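Finally, a brief, hypothetical sketch of `@conda_base` as described in the docstring added above; the package pins and flow name are placeholders and would need to match versions available in your Conda channels.

```python
from metaflow import FlowSpec, conda_base, step


# Placeholder pins: the flow-level environment provides pandas to every step,
# and the Python version is fixed for reproducibility.
@conda_base(packages={"pandas": "2.1.4"}, python="3.11.0")
class CondaBasedFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the flow-level Conda environment

        self.n_rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print("rows:", self.n_rows)


if __name__ == "__main__":
    CondaBasedFlow()
```

Step-specific additions would go on individual steps with `@conda`, matching the division of labor the docstring describes.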