ob-metaflow-stubs 3.5__py2.py3-none-any.whl → 3.7__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +469 -469
- metaflow-stubs/cards.pyi +5 -5
- metaflow-stubs/cli.pyi +3 -3
- metaflow-stubs/client/__init__.pyi +4 -4
- metaflow-stubs/client/core.pyi +6 -6
- metaflow-stubs/client/filecache.pyi +3 -3
- metaflow-stubs/clone_util.pyi +2 -2
- metaflow-stubs/events.pyi +2 -2
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/metadata/metadata.pyi +3 -3
- metaflow-stubs/metadata/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +10 -4
- metaflow-stubs/metaflow_current.pyi +4 -4
- metaflow-stubs/mflog/mflog.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +3 -3
- metaflow-stubs/plugins/__init__.pyi +4 -4
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow.pyi +8 -4
- metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +3 -3
- metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
- metaflow-stubs/plugins/datatools/s3/s3.pyi +9 -6
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +3 -3
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +8 -4
- metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +15 -4
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +24 -7
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +127 -0
- metaflow-stubs/plugins/logs_cli.pyi +3 -3
- metaflow-stubs/plugins/package_cli.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/perimeters.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +3 -3
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/tag_cli.pyi +4 -4
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +5 -3
- metaflow-stubs/procpoll.pyi +2 -2
- metaflow-stubs/profilers/__init__.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +14 -0
- {ob_metaflow_stubs-3.5.dist-info → ob_metaflow_stubs-3.7.dist-info}/METADATA +1 -1
- ob_metaflow_stubs-3.7.dist-info/RECORD +138 -0
- ob_metaflow_stubs-3.5.dist-info/RECORD +0 -136
- {ob_metaflow_stubs-3.5.dist-info → ob_metaflow_stubs-3.7.dist-info}/WHEEL +0 -0
- {ob_metaflow_stubs-3.5.dist-info → ob_metaflow_stubs-3.7.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,23 +1,23 @@
 ##################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.11.
-# Generated on 2024-05-
+# MF version: 2.11.16.2+ob(v1) #
+# Generated on 2024-05-22T22:08:54.490721 #
 ##################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import typing
-    import metaflow.datastore.inputs
-    import metaflow.plugins.datatools.s3.s3
-    import metaflow.events
-    import datetime
-    import metaflow._vendor.click.types
     import metaflow.client.core
+    import metaflow.metaflow_current
     import io
+    import metaflow.events
+    import datetime
     import metaflow.parameters
-    import metaflow.
+    import metaflow.datastore.inputs
+    import typing
+    import metaflow.plugins.datatools.s3.s3
+    import metaflow._vendor.click.types

 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
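The `typing.TYPE_CHECKING` block above is how the stub keeps client imports out of runtime code paths. A minimal sketch of the same pattern in user code (the `describe` helper is illustrative, not part of the package):

```
from __future__ import annotations

import typing

if typing.TYPE_CHECKING:
    # Imported only for type checkers (mypy, pyright); never executed at runtime.
    import metaflow.client.core


def describe(run: metaflow.client.core.Run) -> str:
    # With postponed annotations the hint above stays a string at runtime,
    # so metaflow.client.core is only needed while type checking.
    return f"inspecting {run.pathspec}"
```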
@@ -726,51 +726,244 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
In 3.7 this region now carries the full stub definitions (typed overloads, combined signatures, and numpy-style docstrings) for four step-level decorators; the 3.5 lines they replace are truncated in this view:
- `@timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0)` — specifies a timeout for a step that may hang indefinitely; it can be combined with `@retry` and `@catch` (a timeout is raised as an exception from the step), and the `seconds`, `minutes`, and `hours` values are added together, so 60 seconds plus 1 hour gives an effective timeout of 1 hour and 1 minute.
- `@resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None)` — declares resource requirements independently of the compute layer, which is chosen on the command line with `python myflow.py run --with batch` or `--with kubernetes`; `disk` (MB) only applies on Kubernetes and `shared_memory` (MiB) maps to Docker's `--shm-size`.
- `@card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45)` — creates a human-readable report, a Metaflow Card, after the step completes; multiple `@card` decorators with different parameters may be attached to one step, and `timeout` interrupts reporting after that many seconds.
- `@conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False)` — specifies the Conda environment for the step, augmenting attributes set in the flow-level `@conda_base` decorator; `libraries` is kept for backward compatibility (with `packages` taking precedence), `python=None` uses the interpreter version that started the run, and `disabled=True` turns `@conda` off.
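As a usage illustration only (not part of the package diff; the flow name, package pin, and resource figures are invented), these step decorators are normally stacked above `@step`:

```
from metaflow import FlowSpec, card, conda, resources, step, timeout


class TrainFlow(FlowSpec):

    @card(type="default", timeout=45)       # render a Metaflow Card when the step finishes
    @conda(packages={"pandas": "2.2.2"})    # step-specific addition on top of @conda_base
    @resources(cpu=2, memory=8192)          # requirements, independent of the compute layer
    @timeout(minutes=30, seconds=60)        # budgets are summed: 31 minutes in total
    @step
    def start(self):
        import pandas as pd                 # resolved from the step's Conda environment
        self.table = pd.DataFrame({"x": [1, 2, 3]})
        self.next(self.end)

    @step
    def end(self):
        print(self.table.describe())


if __name__ == "__main__":
    TrainFlow()
```

The compute layer is still picked at run time with `run --with batch` or `run --with kubernetes`, as the `@resources` docstring notes, and Conda decorators are typically enabled with `--environment=conda`.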
@@ -921,23 +1114,87 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
Following the `@batch` stub, 3.7 inserts three more step-level decorators at this position (the replaced 3.5 lines are truncated in this view):
- `@secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [])` — specifies secrets to be retrieved and injected as environment variables prior to the execution of a step; `sources` is a list of secret specs defining how the secrets are retrieved.
- `@environment(*, vars: typing.Dict[str, str] = {})` — specifies plain environment variables to be set prior to the execution of a step.
- `@kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None)` — specifies that the step should execute on Kubernetes; when `@resources` is also present, the maximum of the two values is used for `cpu`, `memory`, and `disk`, and the Docker image falls back to `METAFLOW_KUBERNETES_CONTAINER_IMAGE` when one is not given.
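A short sketch of how these three compose on a step (the secret name, region value, and resource numbers are invented):

```
from metaflow import FlowSpec, environment, kubernetes, secrets, step


class IngestFlow(FlowSpec):

    @secrets(sources=["db-credentials"])          # injected as environment variables before the step runs
    @environment(vars={"REGION": "us-west-2"})    # plain, non-secret environment variables
    @kubernetes(cpu=2, memory=4096, disk=10240)   # execute this step as a Kubernetes pod
    @step
    def start(self):
        import os
        print("running in region", os.environ["REGION"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    IngestFlow()
```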
@@ -981,59 +1238,51 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
The remainder of the `@kubernetes` docstring is unchanged context. After it, the Conda-flavoured stub that occupied this position in 3.5 (its `libraries` and `disabled : bool ... disables @conda` parameter lines are removed here) gives way to the `@pypi` step decorator:
- `@pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None)` — specifies the PyPI packages for the step, augmenting attributes set in the flow-level `@pypi_base` decorator; `packages` maps package names to versions, and `python=None` uses the interpreter version that started the run.
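A sketch of `@pypi` together with its flow-level counterpart `@pypi_base` (flow name, URL, and version pins are invented):

```
from metaflow import FlowSpec, pypi, pypi_base, step


@pypi_base(python="3.10.11", packages={"requests": "2.31.0"})   # baseline for every step
class FetchFlow(FlowSpec):

    @pypi(packages={"beautifulsoup4": "4.12.3"})                 # step-specific addition
    @step
    def start(self):
        import requests
        from bs4 import BeautifulSoup

        html = requests.get("https://example.com", timeout=10).text
        soup = BeautifulSoup(html, "html.parser")
        self.title = soup.title.string if soup.title else ""
        self.next(self.end)

    @step
    def end(self):
        print(self.title)


if __name__ == "__main__":
    FetchFlow()
```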
@@ -1087,304 +1336,153 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
After the `@catch` stub, 3.5 listed the `@timeout`, `@card`, `@resources`, `@retry`, `@secrets`, and `@environment` entries at this position; those 3.5 copies are removed here (3.7 emits most of them earlier in the file, as the hunks above show), and the region now holds:
- `@retry(*, times: int = 3, minutes_between_retries: int = 2)` — specifies how many times the task corresponding to a step is retried, which is useful for transient errors such as networking issues; tasks with operations that cannot be retried safely (for example database updates) should use `@retry(times=0)`, and `@catch` executes a no-op task once all retries are exhausted so the flow can continue.
- `@schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None)` — a flow-level decorator giving the times the flow should run on a production scheduler; `cron` takes a custom Cron expression, and `timezone` (IANA format) is currently supported only for Argo Workflows.
- `@conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False)` — a flow-level decorator specifying the Conda environment for all steps of the flow, with `@conda` supplying step-specific additions; `libraries` is kept for backward compatibility, and `disabled=True` disables Conda.
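A sketch combining the three (the schedule, environment pins, and retry counts are invented; the schedule only applies once the flow is deployed to a production scheduler):

```
from metaflow import FlowSpec, conda_base, retry, schedule, step


@schedule(daily=True)                                          # run once a day when deployed
@conda_base(python="3.10.11", packages={"numpy": "1.26.4"})    # shared Conda environment for all steps
class NightlyFlow(FlowSpec):

    @retry(times=3, minutes_between_retries=2)                 # absorb transient failures, e.g. network errors
    @step
    def start(self):
        import numpy as np
        self.total = float(np.arange(10).sum())
        self.next(self.end)

    @step
    def end(self):
        print(self.total)


if __name__ == "__main__":
    NightlyFlow()
```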
@@ -1427,90 +1525,6 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
Immediately after the `@pypi_base` stub, 3.7 drops the `@airflow_external_task_sensor` and `@airflow_s3_key_sensor` flow-level stubs from this position (both reappear later in the file, see the next hunk). Their full docstrings are removed here, covering the shared sensor options (`timeout`, `poke_interval`, `mode`, `exponential_backoff`, `pool`, `soft_fail`, `name`, `description`), the ExternalTaskSensor fields (`external_dag_id`, `external_task_ids`, `allowed_states`, `failed_states`, `execution_delta`, `check_existence`), and the S3KeySensor fields (`bucket_key`, `bucket_name`, `wildcard_match`, `aws_conn_id`, `verify`). The unchanged context that follows is the flow-level `@trigger` stub, `trigger(*, event=None, events=[], options={})`, which starts the flow when the named event or events are published.
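A minimal sketch of that `@trigger` decorator (the flow and event names are invented):

```
from metaflow import FlowSpec, step, trigger


@trigger(event="data_refreshed")    # start this flow whenever the named event is published
class ReactiveFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ReactiveFlow()
```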
@@ -1606,52 +1620,87 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
|
|
1606
1620
|
"""
|
1607
1621
|
...
 
-
-def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-
-
+    The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as flow decorators. Adding more than one decorator will ensure that the `start` step
+    starts only after all sensors finish.
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
+    timeout : int
+        Time, in seconds, before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time, in seconds, that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+    pool : str
+        The slot pool this task should run in;
+        slot pools are a way to limit concurrency for certain tasks. (Default: None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow.
+    description : str
+        Description of the sensor in the Airflow UI.
+    bucket_key : Union[str, List[str]]
+        The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
+        When it's specified as a full s3:// URL, please leave `bucket_name` as None.
+    bucket_name : str
+        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+        When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+    wildcard_match : bool
+        Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+    aws_conn_id : str
+        A reference to the S3 connection on Airflow. (Default: None)
+    verify : bool
+        Whether or not to verify SSL certificates for the S3 connection. (Default: None)
     """
     ...
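To make the stub added above concrete, here is a minimal sketch of a flow using the decorator as documented. The bucket and key names are hypothetical placeholders, and the flow still has to be compiled with the `airflow create` command mentioned in the docstring:

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor

# Sketch only: the bucket and key below are hypothetical placeholders.
@airflow_s3_key_sensor(
    bucket_key="s3://example-bucket/raw/input.csv",  # full s3:// URL, so bucket_name stays None
    timeout=3600,       # fail the sensor after an hour
    poke_interval=60,   # check for the key every minute
)
class S3SensedFlow(FlowSpec):
    @step
    def start(self):
        # When scheduled on Airflow, this step runs only after the sensor has seen the key.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3SensedFlow()
```

Because `bucket_key` is given as a full `s3://` URL, `bucket_name` can stay at its default, per the parameter notes in the docstring.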
 
-
-def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-    Use `@conda_base` to set common libraries required by all
-    steps and use `@conda` to specify step-specific additions.
+    The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
+    timeout : int
+        Time, in seconds, before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time, in seconds, that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+    pool : str
+        The slot pool this task should run in;
+        slot pools are a way to limit concurrency for certain tasks. (Default: None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow.
+    description : str
+        Description of the sensor in the Airflow UI.
+    external_dag_id : str
+        The dag_id that contains the task you want to wait for.
+    external_task_ids : List[str]
+        The list of task_ids that you want to wait for.
+        If None (default value) the sensor waits for the DAG. (Default: None)
+    allowed_states : List[str]
+        Iterable of allowed states. (Default: ['success'])
+    failed_states : List[str]
+        Iterable of failed or dis-allowed states. (Default: None)
+    execution_delta : datetime.timedelta
+        Time difference with the previous execution to look at;
+        the default is the same logical date as the current task or DAG. (Default: None)
+    check_existence : bool
+        Set to True to check if the external task exists or check if
+        the DAG to wait for exists. (Default: True)
     """
     ...
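Similarly, a minimal sketch of the external-task sensor added above; the upstream DAG and task ids are hypothetical examples:

```python
from metaflow import FlowSpec, step, airflow_external_task_sensor

# Sketch only: "upstream_etl" and "load_warehouse" are hypothetical Airflow ids.
@airflow_external_task_sensor(
    external_dag_id="upstream_etl",
    external_task_ids=["load_warehouse"],
    allowed_states=["success"],
)
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        # Runs only once the named upstream Airflow task has reached an allowed state.
        self.next(self.end)

    @step
    def end(self):
        pass
```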
 
@@ -1673,55 +1722,6 @@ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typ
     """
     ...
 
-@typing.overload
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-
-    Parameters
-    ----------
-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
-@typing.overload
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-
-    Parameters
-    ----------
-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
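The `@schedule` stub removed from this position (its docstring is shown in full in the removed lines above; it likely just relocates within the new file, as the other decorators in this diff do) is the flow-level scheduling decorator. A minimal sketch of its documented usage, using the boolean shortcut rather than a cron expression:

```python
from metaflow import FlowSpec, schedule, step

# Sketch only: schedules the deployed flow once a day on the production orchestrator.
@schedule(daily=True)
class NightlyFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```

A `cron` expression (in the format linked from the docstring) can be passed instead of the boolean flags when a specific time of day is needed.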
 @typing.overload
 def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """