ob-metaflow-stubs 3.4__py2.py3-none-any.whl → 3.6__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (138)
  1. metaflow-stubs/__init__.pyi +479 -479
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +3 -3
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +10 -4
  16. metaflow-stubs/metaflow_current.pyi +18 -18
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +5 -5
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +8 -4
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  61. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  62. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  63. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_cli.pyi +3 -3
  65. metaflow-stubs/plugins/cards/card_client.pyi +4 -4
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  78. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  82. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  83. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  84. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  85. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/s3/s3.pyi +8 -5
  89. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  91. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  92. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  93. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  95. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
  97. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +7 -3
  105. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +15 -4
  106. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +24 -7
  108. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +127 -0
  109. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  110. metaflow-stubs/plugins/package_cli.pyi +2 -2
  111. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/perimeters.pyi +2 -2
  113. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  114. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  117. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  120. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  123. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  126. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  127. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  128. metaflow-stubs/plugins/timeout_decorator.pyi +5 -3
  129. metaflow-stubs/procpoll.pyi +2 -2
  130. metaflow-stubs/profilers/__init__.pyi +2 -2
  131. metaflow-stubs/pylint_wrapper.pyi +2 -2
  132. metaflow-stubs/tagging_util.pyi +2 -2
  133. metaflow-stubs/tuple_util.pyi +14 -0
  134. {ob_metaflow_stubs-3.4.dist-info → ob_metaflow_stubs-3.6.dist-info}/METADATA +1 -1
  135. ob_metaflow_stubs-3.6.dist-info/RECORD +138 -0
  136. ob_metaflow_stubs-3.4.dist-info/RECORD +0 -136
  137. {ob_metaflow_stubs-3.4.dist-info → ob_metaflow_stubs-3.6.dist-info}/WHEEL +0 -0
  138. {ob_metaflow_stubs-3.4.dist-info → ob_metaflow_stubs-3.6.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,23 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.11.15.2+ob(v1) #
- # Generated on 2024-05-17T19:44:44.623630 #
+ # MF version: 2.11.16.1+ob(v1) #
+ # Generated on 2024-05-21T17:36:50.007848 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
+ import metaflow.metaflow_current
+ import metaflow.parameters
  import datetime
- import io
  import metaflow.events
- import metaflow.parameters
+ import metaflow.datastore.inputs
  import typing
- import metaflow.metaflow_current
- import metaflow.client.core
  import metaflow._vendor.click.types
+ import metaflow.client.core
+ import io
  import metaflow.plugins.datatools.s3.s3
- import metaflow.datastore.inputs
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -726,195 +726,51 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- """
- ...
-
- @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
  """
  ...

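For context — the following usage sketch is not part of the diff. The hunk above swaps the `@conda` step-decorator stubs at this position for `@pypi`; a minimal flow using the `@pypi` signature typed here might look as follows (the flow name, package pin, and Python version are illustrative assumptions):

```python
# Sketch only: a flow using the @pypi step decorator typed above.
from metaflow import FlowSpec, step, pypi


class PyPIFlow(FlowSpec):
    # Hypothetical pins; keys are package names, values are versions.
    @pypi(packages={"requests": "2.31.0"}, python="3.10.9")
    @step
    def start(self):
        import requests  # importable inside this step's PyPI environment

        self.status = requests.get("https://api.github.com").status_code
        self.next(self.end)

    @step
    def end(self):
        print("status:", self.status)


if __name__ == "__main__":
    PyPIFlow()
```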
@@ -1066,51 +922,55 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies that the step will success under all circumstances.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

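For context, a usage sketch (not part of the diff) of the `@retry` decorator that replaces `@catch` at this position; the simulated failure is illustrative:

```python
# Sketch only: retrying a step with transient failures, per the @retry
# stub above (defaults: times=3, minutes_between_retries=2).
import random

from metaflow import FlowSpec, step, retry


class RetryFlow(FlowSpec):
    @retry(times=3, minutes_between_retries=2)
    @step
    def start(self):
        # A transient error here fails the task; the scheduler reruns it
        # up to 3 more times, waiting 2 minutes between attempts.
        if random.random() < 0.5:
            raise RuntimeError("simulated transient network error")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    RetryFlow()
```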
@@ -1145,56 +1005,229 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  """
  ...

- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies that this step should execute on Kubernetes.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

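For context, a sketch (not part of the diff) combining the step decorators typed in this hunk. The secret name, resource numbers, and environment-variable key are illustrative assumptions; per the docstrings above, when `@resources` and `@kubernetes` overlap, the maximum value of each setting is used:

```python
# Sketch only: @kubernetes + @resources + @secrets on one step.
from metaflow import FlowSpec, step, kubernetes, resources, secrets


class TrainFlow(FlowSpec):
    @secrets(sources=["db-credentials"])   # hypothetical secret spec
    @resources(cpu=4, memory=16384)        # max'ed against @kubernetes values
    @kubernetes(cpu=2, memory=8192, disk=20480)
    @step
    def start(self):
        import os

        # @secrets injects the secret's keys as environment variables;
        # "DB_PASSWORD" is an assumed key name for illustration.
        print("have password:", "DB_PASSWORD" in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TrainFlow()
```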
@@ -1249,6 +1282,55 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...

+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
  @typing.overload
  def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -1307,84 +1389,202 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
1307
1389
  ...
1308
1390
 
1309
1391
  @typing.overload
1310
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1392
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1311
1393
  """
1312
- Specifies the PyPI packages for the step.
1394
+ Specifies the times when the flow should be run when running on a
1395
+ production scheduler.
1313
1396
 
1314
- Information in this decorator will augment any
1315
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1316
- you can use `@pypi_base` to set packages required by all
1317
- steps and use `@pypi` to specify step-specific overrides.
1397
+ Parameters
1398
+ ----------
1399
+ hourly : bool, default False
1400
+ Run the workflow hourly.
1401
+ daily : bool, default True
1402
+ Run the workflow daily.
1403
+ weekly : bool, default False
1404
+ Run the workflow weekly.
1405
+ cron : str, optional, default None
1406
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1407
+ specified by this expression.
1408
+ timezone : str, optional, default None
1409
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1410
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1411
+ """
1412
+ ...
1413
+
1414
+ @typing.overload
1415
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1416
+ ...
1417
+
1418
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1419
+ """
1420
+ Specifies the times when the flow should be run when running on a
1421
+ production scheduler.
1318
1422
 
1319
1423
  Parameters
1320
1424
  ----------
1321
- packages : Dict[str, str], default: {}
1322
- Packages to use for this step. The key is the name of the package
1425
+ hourly : bool, default False
1426
+ Run the workflow hourly.
1427
+ daily : bool, default True
1428
+ Run the workflow daily.
1429
+ weekly : bool, default False
1430
+ Run the workflow weekly.
1431
+ cron : str, optional, default None
1432
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1433
+ specified by this expression.
1434
+ timezone : str, optional, default None
1435
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1436
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1437
+ """
1438
+ ...
1439
+
1440
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1441
+ """
1442
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1443
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1444
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1445
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1446
+ starts only after all sensors finish.
1447
+
1448
+ Parameters
1449
+ ----------
1450
+ timeout : int
1451
+ Time, in seconds before the task times out and fails. (Default: 3600)
1452
+ poke_interval : int
1453
+ Time in seconds that the job should wait in between each try. (Default: 60)
1454
+ mode : str
1455
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1456
+ exponential_backoff : bool
1457
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1458
+ pool : str
1459
+ the slot pool this task should run in,
1460
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1461
+ soft_fail : bool
1462
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1463
+ name : str
1464
+ Name of the sensor on Airflow
1465
+ description : str
1466
+ Description of sensor in the Airflow UI
1467
+ bucket_key : Union[str, List[str]]
1468
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1469
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1470
+ bucket_name : str
1471
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1472
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1473
+ wildcard_match : bool
1474
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1475
+ aws_conn_id : str
1476
+ a reference to the s3 connection on Airflow. (Default: None)
1477
+ verify : bool
1478
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1479
+ """
1480
+ ...
1481
+
1482
+ @typing.overload
1483
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1484
+ """
1485
+ Specifies the Conda environment for all steps of the flow.
1486
+
1487
+ Use `@conda_base` to set common libraries required by all
1488
+ steps and use `@conda` to specify step-specific additions.
1489
+
1490
+ Parameters
1491
+ ----------
1492
+ packages : Dict[str, str], default {}
1493
+ Packages to use for this flow. The key is the name of the package
1323
1494
  and the value is the version to use.
1324
- python : str, optional, default: None
1495
+ libraries : Dict[str, str], default {}
1496
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1497
+ python : str, optional, default None
1325
1498
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1326
1499
  that the version used will correspond to the version of the Python interpreter used to start the run.
1500
+ disabled : bool, default False
1501
+ If set to True, disables Conda.
1327
1502
  """
1328
1503
  ...
1329
1504
 
1330
1505
  @typing.overload
1331
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1506
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1332
1507
  ...
1333
1508
 
1334
- @typing.overload
1335
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1509
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1510
+ """
1511
+ Specifies the Conda environment for all steps of the flow.
1512
+
1513
+ Use `@conda_base` to set common libraries required by all
1514
+ steps and use `@conda` to specify step-specific additions.
1515
+
1516
+ Parameters
1517
+ ----------
1518
+ packages : Dict[str, str], default {}
1519
+ Packages to use for this flow. The key is the name of the package
1520
+ and the value is the version to use.
1521
+ libraries : Dict[str, str], default {}
1522
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1523
+ python : str, optional, default None
1524
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1525
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1526
+ disabled : bool, default False
1527
+ If set to True, disables Conda.
1528
+ """
1529
+ ...
1530
+
1531
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1532
+ """
1533
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1534
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1535
+
1536
+ Parameters
1537
+ ----------
1538
+ timeout : int
1539
+ Time, in seconds before the task times out and fails. (Default: 3600)
1540
+ poke_interval : int
1541
+ Time in seconds that the job should wait in between each try. (Default: 60)
1542
+ mode : str
1543
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1544
+ exponential_backoff : bool
1545
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1546
+ pool : str
1547
+ the slot pool this task should run in,
1548
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1549
+ soft_fail : bool
1550
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1551
+ name : str
1552
+ Name of the sensor on Airflow
1553
+ description : str
1554
+ Description of sensor in the Airflow UI
1555
+ external_dag_id : str
1556
+ The dag_id that contains the task you want to wait for.
1557
+ external_task_ids : List[str]
1558
+ The list of task_ids that you want to wait for.
1559
+ If None (default value) the sensor waits for the DAG. (Default: None)
1560
+ allowed_states : List[str]
1561
+ Iterable of allowed states, (Default: ['success'])
1562
+ failed_states : List[str]
1563
+ Iterable of failed or dis-allowed states. (Default: None)
1564
+ execution_delta : datetime.timedelta
1565
+ time difference with the previous execution to look at,
1566
+ the default is the same logical date as the current task or DAG. (Default: None)
1567
+ check_existence: bool
1568
+ Set to True to check if the external task exists or check if
1569
+ the DAG to wait for exists. (Default: True)
1570
+ """
1336
1571
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies what flows belong to the same project.

- Information in this decorator will augment any
- attributes set in the `@pypi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
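For contrast with the flow-level variants, a minimal sketch of the step-level `@pypi` decorator whose stub is removed in this hunk; the flow name and pins are hypothetical:

from metaflow import FlowSpec, pypi, step

class PypiDemoFlow(FlowSpec):

    @pypi(packages={'requests': '2.31.0'}, python='3.10.4')  # hypothetical pins
    @step
    def start(self):
        # requests is available only inside this step's environment
        import requests
        print(requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    PypiDemoFlow()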
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.

- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
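A minimal sketch of the `@secrets` step decorator being removed here, assuming a configured secrets backend; the secret name and environment variable are hypothetical:

import os

from metaflow import FlowSpec, secrets, step

class SecretsDemoFlow(FlowSpec):

    # 'my-db-credentials' is a placeholder secret spec; per the docstring
    # above, its keys are injected as environment variables before the step.
    @secrets(sources=['my-db-credentials'])
    @step
    def start(self):
        print('user is', os.environ.get('DB_USER'))  # hypothetical key
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    SecretsDemoFlow()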
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.

- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
  """
  ...
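A minimal sketch of the `@project` flow decorator added above; the project name is hypothetical:

from metaflow import FlowSpec, project, step

@project(name='fraud_detection')  # hypothetical project name
class TrainingFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    TrainingFlow()

Flows deployed under the same `@project(name)` then share one namespace on the production scheduler, keeping their deployments isolated from other projects.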

@@ -1530,73 +1730,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
- """
- ...
-
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
  @typing.overload
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1692,139 +1825,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...
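The body of the `@trigger` docstring is elided between these hunks, but the signature above suggests event-driven usage along these lines; the event name is hypothetical:

from metaflow import FlowSpec, step, trigger

@trigger(event='data_updated')  # hypothetical event name
class EventDrivenFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    EventDrivenFlow()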

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
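A minimal sketch of the `@schedule` decorator documented above; the cron expression is hypothetical:

from metaflow import FlowSpec, schedule, step

@schedule(cron='0 6 * * *')  # hypothetical: run daily at 06:00
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    NightlyFlow()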
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators; adding more than one ensures that the `start` step begins only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value), the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states. (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or disallowed states. (Default: None)
- execution_delta : datetime.timedelta
- Time difference with the previous execution to look at;
- by default, the same logical date as the current task or DAG. (Default: None)
- check_existence : bool
- Set to True to check whether the external task or the DAG
- to wait for exists. (Default: True)
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as flow decorators; adding more than one ensures that the `start` step
- begins only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
- When specified as a full s3:// URL, leave `bucket_name` as None.
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
- wildcard_match : bool
- Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- A reference to the S3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for the S3 connection. (Default: None)
- """
- ...
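A minimal sketch of the `@airflow_s3_key_sensor` decorator being removed here; the bucket and key are hypothetical, and unlisted arguments fall back to the defaults documented above:

from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(bucket_key='s3://my-bucket/raw/latest.parquet')
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        # reached only once the S3 key above exists
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    S3GatedFlow()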
-
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.