metaflow-stubs 2.12.0__py2.py3-none-any.whl → 2.12.2__py2.py3-none-any.whl

This diff shows the content changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (142)
  1. metaflow-stubs/__init__.pyi +467 -467
  2. metaflow-stubs/cards.pyi +6 -6
  3. metaflow-stubs/cli.pyi +8 -23
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +4 -2
  16. metaflow-stubs/metaflow_current.pyi +18 -18
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +4 -4
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +4 -4
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  60. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  61. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  62. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  63. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  65. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
  78. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  82. metaflow-stubs/plugins/cards/component_serializer.pyi +5 -5
  83. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  84. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  85. metaflow-stubs/plugins/datatools/__init__.pyi +4 -4
  86. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  89. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  91. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  92. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  93. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  95. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  97. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  99. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +4 -4
  106. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  108. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  110. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  111. metaflow-stubs/plugins/package_cli.pyi +2 -2
  112. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  114. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  117. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  120. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  123. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  126. metaflow-stubs/plugins/tag_cli.pyi +5 -5
  127. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  128. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  129. metaflow-stubs/procpoll.pyi +2 -2
  130. metaflow-stubs/pylint_wrapper.pyi +2 -2
  131. metaflow-stubs/runner/__init__.pyi +2 -2
  132. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  133. metaflow-stubs/runner/nbrun.pyi +2 -2
  134. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  135. metaflow-stubs/tagging_util.pyi +2 -2
  136. metaflow-stubs/tuple_util.pyi +2 -2
  137. metaflow-stubs/version.pyi +2 -2
  138. {metaflow_stubs-2.12.0.dist-info → metaflow_stubs-2.12.2.dist-info}/METADATA +2 -2
  139. metaflow_stubs-2.12.2.dist-info/RECORD +142 -0
  140. metaflow_stubs-2.12.0.dist-info/RECORD +0 -142
  141. {metaflow_stubs-2.12.0.dist-info → metaflow_stubs-2.12.2.dist-info}/WHEEL +0 -0
  142. {metaflow_stubs-2.12.0.dist-info → metaflow_stubs-2.12.2.dist-info}/top_level.txt +0 -0
@@ -1,24 +1,24 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.0 #
- # Generated on 2024-05-28T09:55:27.155006 #
+ # MF version: 2.12.2 #
+ # Generated on 2024-06-04T07:19:10.055768 #
  ##################################################################################
 
  from __future__ import annotations
 
  import typing
  if typing.TYPE_CHECKING:
- import metaflow.parameters
- import metaflow._vendor.click.types
  import metaflow.datastore.inputs
- import datetime
- import metaflow.client.core
  import metaflow.plugins.datatools.s3.s3
- import typing
+ import metaflow._vendor.click.types
  import metaflow.metaflow_current
- import io
- import metaflow.events
  import metaflow.runner.metaflow_runner
+ import metaflow.parameters
+ import metaflow.client.core
+ import metaflow.events
+ import datetime
+ import io
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)
 
@@ -726,83 +726,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...
 
- @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
  def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on Kubernetes.
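(Aside, not part of the diff.) The `@resources` stanza relocated by the hunk above documents compute-layer-independent resource requests. A minimal usage sketch based on that docstring; the flow name, step bodies, and numbers are illustrative only:

```python
from metaflow import FlowSpec, resources, step

class ResourcesDemoFlow(FlowSpec):  # hypothetical flow, for illustration only

    @resources(cpu=2, memory=8192)  # requests honored by @batch or @kubernetes
    @step
    def start(self):
        self.x = 1
        self.next(self.end)

    @step
    def end(self):
        print(self.x)

if __name__ == "__main__":
    ResourcesDemoFlow()
```

As the docstring notes, the compute layer is chosen at run time, e.g. `python myflow.py run --with batch` or `--with kubernetes`.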
@@ -862,6 +785,127 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  """
  ...
 
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
  @typing.overload
  def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -1010,123 +1054,100 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...
 
  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Specifies that the step will success under all circumstances.
 
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
 
  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...
 
  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies a timeout for your step.
+ Specifies that the step will success under all circumstances.
 
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
 
  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...
 
  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the PyPI packages for the step.
 
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
 
  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
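(Aside, not part of the diff.) The hunk above reorders the `@catch` and `@pypi` stubs, whose docstrings describe catching step failures into an artifact and pinning per-step PyPI packages. A hedged sketch of both together; flow name, artifact name, URL, and pins are hypothetical, and whether the `var` artifact exists on the happy path is not stated in the docstring, so the example reads it defensively:

```python
from metaflow import FlowSpec, catch, pypi, step

class CatchDemoFlow(FlowSpec):  # hypothetical flow, for illustration only

    @catch(var="compute_failed")             # caught exception, if any, stored in this artifact
    @pypi(packages={"requests": "2.31.0"})   # version pin is illustrative
    @step
    def start(self):
        import requests
        self.status = requests.get("https://example.com").status_code
        self.next(self.end)

    @step
    def end(self):
        err = getattr(self, "compute_failed", None)  # defensive read; see lead-in
        if err is not None:
            print("start step failed:", err)

if __name__ == "__main__":
    CatchDemoFlow()
```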
 
@@ -1183,6 +1204,63 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...
 
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
  @typing.overload
  def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -1235,68 +1313,202 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...
 
  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies the resources needed when executing this step.
 
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
 
  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...
 
  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies that the step will success under all circumstances.
+ Specifies the resources needed when executing this step.
 
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
 
  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
  """
  ...
 
  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
 
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
 
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+ wildcard_match : bool
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ a reference to the s3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
  packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
+ Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
  python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
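(Aside, not part of the diff.) Among the stubs added in the hunk above are the flow-level `@project` and `@schedule` decorators. A hedged sketch of how they compose on a flow class; the project name, schedule, and flow name are made up, and the schedule only takes effect on a production scheduler as the docstring states:

```python
from metaflow import FlowSpec, project, schedule, step

@project(name="demo_reports")   # hypothetical project name: lowercase alphanumerics and underscores
@schedule(daily=True)           # or cron="0 6 * * *"; timezone is Argo-only per the docstring
class NightlyFlow(FlowSpec):    # hypothetical flow, for illustration only

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```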
@@ -1305,26 +1517,19 @@ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] =
  ...
 
  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the PyPI packages for the step.
+ Specifies the PyPI packages for all steps of the flow.
 
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
+ Use `@pypi_base` to set common packages required by all
  steps and use `@pypi` to specify step-specific overrides.
-
  Parameters
  ----------
  packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
+ Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
  python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
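(Aside, not part of the diff.) The hunks above rewrite the `@pypi` stub at this position into the flow-level `@pypi_base`, whose docstring describes flow-wide packages with per-step `@pypi` overrides. A hedged sketch; flow name and version pins are illustrative:

```python
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(packages={"pandas": "2.2.2"}, python="3.11")  # flow-wide defaults; pins are made up
class PypiBaseFlow(FlowSpec):  # hypothetical flow, for illustration only

    @step
    def start(self):
        import pandas  # resolved from the flow-level environment
        self.next(self.train)

    @pypi(packages={"scikit-learn": "1.4.2"})  # step-specific override on top of @pypi_base
    @step
    def train(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiBaseFlow()
```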
@@ -1333,19 +1538,17 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...
 
  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the Conda environment for all steps of the flow.
 
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
 
  Parameters
  ----------
  packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
+ Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
  libraries : Dict[str, str], default {}
  Supported for backward compatibility. When used with packages, packages will take precedence.
@@ -1353,31 +1556,25 @@ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, s
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
  disabled : bool, default False
- If set to True, disables @conda.
+ If set to True, disables Conda.
  """
  ...
 
  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the Conda environment for the step.
+ Specifies the Conda environment for all steps of the flow.
 
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
 
  Parameters
  ----------
  packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
+ Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
  libraries : Dict[str, str], default {}
  Supported for backward compatibility. When used with packages, packages will take precedence.
@@ -1385,7 +1582,49 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
  disabled : bool, default False
- If set to True, disables @conda.
+ If set to True, disables Conda.
+ """
+ ...
+
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states, (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ time difference with the previous execution to look at,
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
  """
  ...
1391
1630
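A hedged usage sketch of the sensor documented above. The DAG and task ids are placeholders, and the explicit keyword values simply restate the defaults listed in the docstring; only `airflow create` deployments honor this decorator.

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step


# "upstream_dag" and "publish_table" are placeholder Airflow identifiers.
@airflow_external_task_sensor(
    name="wait_for_upstream",
    description="Gate start on an upstream Airflow task",
    external_dag_id="upstream_dag",
    external_task_ids=["publish_table"],
    allowed_states=["success"],
    failed_states=None,
    execution_delta=None,
    check_existence=True,
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorGatedFlow()
```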
 
@@ -1484,105 +1723,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
- """
- ...
-
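A minimal sketch of `@project` as documented in the removed block above; the project name is a placeholder chosen to satisfy the documented naming rule (lowercase alphanumerics and underscores, unique per production scheduler).

```python
from metaflow import FlowSpec, project, step


# "demand_forecast" is a hypothetical project name; all flows decorated with
# the same @project(name) share one project-specific namespace.
@project(name="demand_forecast")
class ProjectScopedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectScopedFlow()
```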
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
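A minimal sketch of `@pypi_base` as documented above; the Python version and package pins are illustrative assumptions, not values from the stubs.

```python
from metaflow import FlowSpec, pypi_base, step


# Flow-wide PyPI packages; any pip-resolvable name/version pair works here.
@pypi_base(python="3.11.5", packages={"requests": "2.32.3"})
class PypiExampleFlow(FlowSpec):

    @step
    def start(self):
        import requests  # installed from PyPI into the step environment
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiExampleFlow()
```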
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
- """
- ...
-
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1686,146 +1826,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

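Based on the visible `@trigger_on_finish` signature above (its full docstring falls outside this hunk), a minimal sketch; the upstream flow name is a placeholder, and the behavior assumed is the documented one of starting this flow when the named flow finishes on a production scheduler.

```python
from metaflow import FlowSpec, step, trigger_on_finish


# "UpstreamFlow" is a placeholder; when both flows are deployed, this flow
# is triggered after UpstreamFlow completes.
@trigger_on_finish(flow="UpstreamFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```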
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
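A minimal sketch of `@schedule`. The cron expression and timezone are illustrative and assume an Argo Workflows deployment, since the docstring says only Argo honors `timezone`; the exact cron syntax accepted depends on the target scheduler.

```python
from metaflow import FlowSpec, schedule, step


# Illustrative: run daily at 06:00 in the given IANA timezone on Argo.
@schedule(cron="0 6 * * *", timezone="Europe/London")
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```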
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
- """
- ...
-
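A minimal sketch of `@airflow_s3_key_sensor` as documented above. The S3 URL is a placeholder; because a full `s3://` URL is given, `bucket_name` is left as None per the docstring, and the remaining keywords restate the documented defaults.

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


# "s3://example-bucket/raw/input.parquet" is a hypothetical object key.
@airflow_s3_key_sensor(
    name="wait_for_input_object",
    description="Gate start on an object landing in S3",
    bucket_key="s3://example-bucket/raw/input.parquet",
    bucket_name=None,
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3GatedFlow()
```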
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.