metaflow-stubs 2.12.14__py2.py3-none-any.whl → 2.12.15__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150) hide show
  1. metaflow-stubs/__init__.pyi +500 -500
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +2 -2
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +24 -24
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +9 -7
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +7 -7
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  38. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  58. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  62. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  63. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  64. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  65. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  67. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  73. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  80. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  84. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  85. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  86. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/s3/__init__.pyi +4 -4
  90. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  91. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  93. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  94. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  95. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  96. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  99. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  112. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  113. metaflow-stubs/plugins/package_cli.pyi +2 -2
  114. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  117. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  121. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  122. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  125. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  126. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  128. metaflow-stubs/plugins/tag_cli.pyi +5 -5
  129. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  130. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  131. metaflow-stubs/procpoll.pyi +2 -2
  132. metaflow-stubs/pylint_wrapper.pyi +2 -2
  133. metaflow-stubs/runner/__init__.pyi +2 -2
  134. metaflow-stubs/runner/deployer.pyi +3 -3
  135. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  136. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  137. metaflow-stubs/runner/nbrun.pyi +2 -2
  138. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  139. metaflow-stubs/runner/utils.pyi +2 -2
  140. metaflow-stubs/system/__init__.pyi +3 -3
  141. metaflow-stubs/system/system_logger.pyi +2 -2
  142. metaflow-stubs/system/system_monitor.pyi +3 -3
  143. metaflow-stubs/tagging_util.pyi +2 -2
  144. metaflow-stubs/tuple_util.pyi +2 -2
  145. metaflow-stubs/version.pyi +2 -2
  146. {metaflow_stubs-2.12.14.dist-info → metaflow_stubs-2.12.15.dist-info}/METADATA +2 -2
  147. metaflow_stubs-2.12.15.dist-info/RECORD +150 -0
  148. metaflow_stubs-2.12.14.dist-info/RECORD +0 -150
  149. {metaflow_stubs-2.12.14.dist-info → metaflow_stubs-2.12.15.dist-info}/WHEEL +0 -0
  150. {metaflow_stubs-2.12.14.dist-info → metaflow_stubs-2.12.15.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
1
1
  ##################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.12.14 #
4
- # Generated on 2024-08-22T15:17:51.800458 #
3
+ # MF version: 2.12.15 #
4
+ # Generated on 2024-08-22T20:18:55.258441 #
5
5
  ##################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import metaflow.metaflow_current
12
- import metaflow.events
13
- import metaflow.plugins.datatools.s3.s3
11
+ import metaflow.runner.metaflow_runner
14
12
  import metaflow.parameters
15
13
  import io
16
- import metaflow._vendor.click.types
17
- import datetime
18
14
  import metaflow.flowspec
19
- import metaflow.runner.metaflow_runner
20
- import metaflow.datastore.inputs
21
15
  import metaflow.client.core
16
+ import metaflow.events
17
+ import datetime
18
+ import metaflow.datastore.inputs
22
19
  import typing
20
+ import metaflow.plugins.datatools.s3.s3
21
+ import metaflow._vendor.click.types
22
+ import metaflow.metaflow_current
23
23
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
24
24
  StepFlag = typing.NewType("StepFlag", bool)
25
25
 
@@ -727,6 +727,170 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
727
727
  """
728
728
  ...
729
729
 
730
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
731
+ """
732
+ Specifies that this step should execute on Kubernetes.
733
+
734
+ Parameters
735
+ ----------
736
+ cpu : int, default 1
737
+ Number of CPUs required for this step. If `@resources` is
738
+ also present, the maximum value from all decorators is used.
739
+ memory : int, default 4096
740
+ Memory size (in MB) required for this step. If
741
+ `@resources` is also present, the maximum value from all decorators is
742
+ used.
743
+ disk : int, default 10240
744
+ Disk size (in MB) required for this step. If
745
+ `@resources` is also present, the maximum value from all decorators is
746
+ used.
747
+ image : str, optional, default None
748
+ Docker image to use when launching on Kubernetes. If not specified, and
749
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
750
+ not, a default Docker image mapping to the current version of Python is used.
751
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
752
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
753
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
754
+ Kubernetes service account to use when launching pod in Kubernetes.
755
+ secrets : List[str], optional, default None
756
+ Kubernetes secrets to use when launching pod in Kubernetes. These
757
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
758
+ in Metaflow configuration.
759
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
760
+ Kubernetes namespace to use when launching pod in Kubernetes.
761
+ gpu : int, optional, default None
762
+ Number of GPUs required for this step. A value of zero implies that
763
+ the scheduled node should not have GPUs.
764
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
765
+ The vendor of the GPUs to be used for this step.
766
+ tolerations : List[str], default []
767
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
768
+ Kubernetes tolerations to use when launching pod in Kubernetes.
769
+ use_tmpfs : bool, default False
770
+ This enables an explicit tmpfs mount for this step.
771
+ tmpfs_tempdir : bool, default True
772
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
773
+ tmpfs_size : int, optional, default: None
774
+ The value for the size (in MiB) of the tmpfs mount for this step.
775
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
776
+ memory allocated for this step.
777
+ tmpfs_path : str, optional, default /metaflow_temp
778
+ Path to tmpfs mount for this step.
779
+ persistent_volume_claims : Dict[str, str], optional, default None
780
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
781
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
782
+ shared_memory: int, optional
783
+ Shared memory size (in MiB) required for this step
784
+ port: int, optional
785
+ Port number to specify in the Kubernetes job object
786
+ compute_pool : str, optional, default None
787
+ Compute pool to be used for for this step.
788
+ If not specified, any accessible compute pool within the perimeter is used.
789
+ """
790
+ ...
791
+
792
+ @typing.overload
793
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
794
+ """
795
+ Specifies the number of times the task corresponding
796
+ to a step needs to be retried.
797
+
798
+ This decorator is useful for handling transient errors, such as networking issues.
799
+ If your task contains operations that can't be retried safely, e.g. database updates,
800
+ it is advisable to annotate it with `@retry(times=0)`.
801
+
802
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
803
+ decorator will execute a no-op task after all retries have been exhausted,
804
+ ensuring that the flow execution can continue.
805
+
806
+ Parameters
807
+ ----------
808
+ times : int, default 3
809
+ Number of times to retry this task.
810
+ minutes_between_retries : int, default 2
811
+ Number of minutes between retries.
812
+ """
813
+ ...
814
+
815
+ @typing.overload
816
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
817
+ ...
818
+
819
+ @typing.overload
820
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
821
+ ...
822
+
823
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
824
+ """
825
+ Specifies the number of times the task corresponding
826
+ to a step needs to be retried.
827
+
828
+ This decorator is useful for handling transient errors, such as networking issues.
829
+ If your task contains operations that can't be retried safely, e.g. database updates,
830
+ it is advisable to annotate it with `@retry(times=0)`.
831
+
832
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
833
+ decorator will execute a no-op task after all retries have been exhausted,
834
+ ensuring that the flow execution can continue.
835
+
836
+ Parameters
837
+ ----------
838
+ times : int, default 3
839
+ Number of times to retry this task.
840
+ minutes_between_retries : int, default 2
841
+ Number of minutes between retries.
842
+ """
843
+ ...
844
+
845
+ @typing.overload
846
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
847
+ """
848
+ Specifies that the step will success under all circumstances.
849
+
850
+ The decorator will create an optional artifact, specified by `var`, which
851
+ contains the exception raised. You can use it to detect the presence
852
+ of errors, indicating that all happy-path artifacts produced by the step
853
+ are missing.
854
+
855
+ Parameters
856
+ ----------
857
+ var : str, optional, default None
858
+ Name of the artifact in which to store the caught exception.
859
+ If not specified, the exception is not stored.
860
+ print_exception : bool, default True
861
+ Determines whether or not the exception is printed to
862
+ stdout when caught.
863
+ """
864
+ ...
865
+
866
+ @typing.overload
867
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
868
+ ...
869
+
870
+ @typing.overload
871
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
872
+ ...
873
+
874
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
875
+ """
876
+ Specifies that the step will success under all circumstances.
877
+
878
+ The decorator will create an optional artifact, specified by `var`, which
879
+ contains the exception raised. You can use it to detect the presence
880
+ of errors, indicating that all happy-path artifacts produced by the step
881
+ are missing.
882
+
883
+ Parameters
884
+ ----------
885
+ var : str, optional, default None
886
+ Name of the artifact in which to store the caught exception.
887
+ If not specified, the exception is not stored.
888
+ print_exception : bool, default True
889
+ Determines whether or not the exception is printed to
890
+ stdout when caught.
891
+ """
892
+ ...
893
+
730
894
  @typing.overload
731
895
  def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
732
896
  """
@@ -805,115 +969,57 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
805
969
  ...
806
970
 
807
971
  @typing.overload
808
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
972
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
809
973
  """
810
- Specifies the Conda environment for the step.
811
-
812
- Information in this decorator will augment any
813
- attributes set in the `@conda_base` flow-level decorator. Hence,
814
- you can use `@conda_base` to set packages required by all
815
- steps and use `@conda` to specify step-specific overrides.
974
+ Specifies environment variables to be set prior to the execution of a step.
816
975
 
817
976
  Parameters
818
977
  ----------
819
- packages : Dict[str, str], default {}
820
- Packages to use for this step. The key is the name of the package
821
- and the value is the version to use.
822
- libraries : Dict[str, str], default {}
823
- Supported for backward compatibility. When used with packages, packages will take precedence.
824
- python : str, optional, default None
825
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
826
- that the version used will correspond to the version of the Python interpreter used to start the run.
827
- disabled : bool, default False
828
- If set to True, disables @conda.
978
+ vars : Dict[str, str], default {}
979
+ Dictionary of environment variables to set.
829
980
  """
830
981
  ...
831
982
 
832
983
  @typing.overload
833
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
984
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
834
985
  ...
835
986
 
836
987
  @typing.overload
837
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
988
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
838
989
  ...
839
990
 
840
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
991
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
841
992
  """
842
- Specifies the Conda environment for the step.
843
-
844
- Information in this decorator will augment any
845
- attributes set in the `@conda_base` flow-level decorator. Hence,
846
- you can use `@conda_base` to set packages required by all
847
- steps and use `@conda` to specify step-specific overrides.
993
+ Specifies environment variables to be set prior to the execution of a step.
848
994
 
849
995
  Parameters
850
996
  ----------
851
- packages : Dict[str, str], default {}
852
- Packages to use for this step. The key is the name of the package
853
- and the value is the version to use.
854
- libraries : Dict[str, str], default {}
855
- Supported for backward compatibility. When used with packages, packages will take precedence.
856
- python : str, optional, default None
857
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
858
- that the version used will correspond to the version of the Python interpreter used to start the run.
859
- disabled : bool, default False
860
- If set to True, disables @conda.
997
+ vars : Dict[str, str], default {}
998
+ Dictionary of environment variables to set.
861
999
  """
862
1000
  ...
863
1001
 
864
1002
  @typing.overload
865
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1003
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
866
1004
  """
867
- Creates a human-readable report, a Metaflow Card, after this step completes.
868
-
869
- Note that you may add multiple `@card` decorators in a step with different parameters.
870
-
871
- Parameters
872
- ----------
873
- type : str, default 'default'
874
- Card type.
875
- id : str, optional, default None
876
- If multiple cards are present, use this id to identify this card.
877
- options : Dict[str, Any], default {}
878
- Options passed to the card. The contents depend on the card type.
879
- timeout : int, default 45
880
- Interrupt reporting if it takes more than this many seconds.
881
-
882
-
1005
+ Decorator prototype for all step decorators. This function gets specialized
1006
+ and imported for all decorators types by _import_plugin_decorators().
883
1007
  """
884
1008
  ...
885
1009
 
886
1010
  @typing.overload
887
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1011
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
888
1012
  ...
889
1013
 
890
- @typing.overload
891
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1014
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1015
+ """
1016
+ Decorator prototype for all step decorators. This function gets specialized
1017
+ and imported for all decorators types by _import_plugin_decorators().
1018
+ """
892
1019
  ...
893
1020
 
894
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
895
- """
896
- Creates a human-readable report, a Metaflow Card, after this step completes.
897
-
898
- Note that you may add multiple `@card` decorators in a step with different parameters.
899
-
900
- Parameters
901
- ----------
902
- type : str, default 'default'
903
- Card type.
904
- id : str, optional, default None
905
- If multiple cards are present, use this id to identify this card.
906
- options : Dict[str, Any], default {}
907
- Options passed to the card. The contents depend on the card type.
908
- timeout : int, default 45
909
- Interrupt reporting if it takes more than this many seconds.
910
-
911
-
912
- """
913
- ...
914
-
915
- @typing.overload
916
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1021
+ @typing.overload
1022
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
917
1023
  """
918
1024
  Specifies a timeout for your step.
919
1025
 
@@ -969,6 +1075,57 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
969
1075
  """
970
1076
  ...
971
1077
 
1078
+ @typing.overload
1079
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1080
+ """
1081
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1082
+
1083
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1084
+
1085
+ Parameters
1086
+ ----------
1087
+ type : str, default 'default'
1088
+ Card type.
1089
+ id : str, optional, default None
1090
+ If multiple cards are present, use this id to identify this card.
1091
+ options : Dict[str, Any], default {}
1092
+ Options passed to the card. The contents depend on the card type.
1093
+ timeout : int, default 45
1094
+ Interrupt reporting if it takes more than this many seconds.
1095
+
1096
+
1097
+ """
1098
+ ...
1099
+
1100
+ @typing.overload
1101
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1102
+ ...
1103
+
1104
+ @typing.overload
1105
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1106
+ ...
1107
+
1108
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1109
+ """
1110
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1111
+
1112
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1113
+
1114
+ Parameters
1115
+ ----------
1116
+ type : str, default 'default'
1117
+ Card type.
1118
+ id : str, optional, default None
1119
+ If multiple cards are present, use this id to identify this card.
1120
+ options : Dict[str, Any], default {}
1121
+ Options passed to the card. The contents depend on the card type.
1122
+ timeout : int, default 45
1123
+ Interrupt reporting if it takes more than this many seconds.
1124
+
1125
+
1126
+ """
1127
+ ...
1128
+
972
1129
  @typing.overload
973
1130
  def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
974
1131
  """
@@ -1117,340 +1274,342 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1117
1274
  ...
1118
1275
 
1119
1276
  @typing.overload
1120
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1121
- """
1122
- Specifies secrets to be retrieved and injected as environment variables prior to
1123
- the execution of a step.
1124
-
1125
- Parameters
1126
- ----------
1127
- sources : List[Union[str, Dict[str, Any]]], default: []
1128
- List of secret specs, defining how the secrets are to be retrieved
1129
- """
1130
- ...
1131
-
1132
- @typing.overload
1133
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1134
- ...
1135
-
1136
- @typing.overload
1137
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1138
- ...
1139
-
1140
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1141
- """
1142
- Specifies secrets to be retrieved and injected as environment variables prior to
1143
- the execution of a step.
1144
-
1145
- Parameters
1146
- ----------
1147
- sources : List[Union[str, Dict[str, Any]]], default: []
1148
- List of secret specs, defining how the secrets are to be retrieved
1149
- """
1150
- ...
1151
-
1152
- @typing.overload
1153
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1277
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1154
1278
  """
1155
- Specifies the number of times the task corresponding
1156
- to a step needs to be retried.
1157
-
1158
- This decorator is useful for handling transient errors, such as networking issues.
1159
- If your task contains operations that can't be retried safely, e.g. database updates,
1160
- it is advisable to annotate it with `@retry(times=0)`.
1279
+ Specifies the PyPI packages for the step.
1161
1280
 
1162
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1163
- decorator will execute a no-op task after all retries have been exhausted,
1164
- ensuring that the flow execution can continue.
1281
+ Information in this decorator will augment any
1282
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1283
+ you can use `@pypi_base` to set packages required by all
1284
+ steps and use `@pypi` to specify step-specific overrides.
1165
1285
 
1166
1286
  Parameters
1167
1287
  ----------
1168
- times : int, default 3
1169
- Number of times to retry this task.
1170
- minutes_between_retries : int, default 2
1171
- Number of minutes between retries.
1288
+ packages : Dict[str, str], default: {}
1289
+ Packages to use for this step. The key is the name of the package
1290
+ and the value is the version to use.
1291
+ python : str, optional, default: None
1292
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1293
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1172
1294
  """
1173
1295
  ...
1174
1296
 
1175
1297
  @typing.overload
1176
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1298
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1177
1299
  ...
1178
1300
 
1179
1301
  @typing.overload
1180
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1302
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1181
1303
  ...
1182
1304
 
1183
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1305
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1184
1306
  """
1185
- Specifies the number of times the task corresponding
1186
- to a step needs to be retried.
1187
-
1188
- This decorator is useful for handling transient errors, such as networking issues.
1189
- If your task contains operations that can't be retried safely, e.g. database updates,
1190
- it is advisable to annotate it with `@retry(times=0)`.
1307
+ Specifies the PyPI packages for the step.
1191
1308
 
1192
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1193
- decorator will execute a no-op task after all retries have been exhausted,
1194
- ensuring that the flow execution can continue.
1309
+ Information in this decorator will augment any
1310
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1311
+ you can use `@pypi_base` to set packages required by all
1312
+ steps and use `@pypi` to specify step-specific overrides.
1195
1313
 
1196
1314
  Parameters
1197
1315
  ----------
1198
- times : int, default 3
1199
- Number of times to retry this task.
1200
- minutes_between_retries : int, default 2
1201
- Number of minutes between retries.
1316
+ packages : Dict[str, str], default: {}
1317
+ Packages to use for this step. The key is the name of the package
1318
+ and the value is the version to use.
1319
+ python : str, optional, default: None
1320
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1321
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1202
1322
  """
1203
1323
  ...
1204
1324
 
1205
1325
  @typing.overload
1206
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1326
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1207
1327
  """
1208
- Specifies environment variables to be set prior to the execution of a step.
1328
+ Specifies secrets to be retrieved and injected as environment variables prior to
1329
+ the execution of a step.
1209
1330
 
1210
1331
  Parameters
1211
1332
  ----------
1212
- vars : Dict[str, str], default {}
1213
- Dictionary of environment variables to set.
1333
+ sources : List[Union[str, Dict[str, Any]]], default: []
1334
+ List of secret specs, defining how the secrets are to be retrieved
1214
1335
  """
1215
1336
  ...
1216
1337
 
1217
1338
  @typing.overload
1218
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1339
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1219
1340
  ...
1220
1341
 
1221
1342
  @typing.overload
1222
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1343
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1223
1344
  ...
1224
1345
 
1225
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1346
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1226
1347
  """
1227
- Specifies environment variables to be set prior to the execution of a step.
1348
+ Specifies secrets to be retrieved and injected as environment variables prior to
1349
+ the execution of a step.
1228
1350
 
1229
1351
  Parameters
1230
1352
  ----------
1231
- vars : Dict[str, str], default {}
1232
- Dictionary of environment variables to set.
1353
+ sources : List[Union[str, Dict[str, Any]]], default: []
1354
+ List of secret specs, defining how the secrets are to be retrieved
1233
1355
  """
1234
1356
  ...
1235
1357
 
1236
1358
  @typing.overload
1237
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1359
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1238
1360
  """
1239
- Specifies the PyPI packages for the step.
1361
+ Specifies the Conda environment for the step.
1240
1362
 
1241
1363
  Information in this decorator will augment any
1242
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1243
- you can use `@pypi_base` to set packages required by all
1244
- steps and use `@pypi` to specify step-specific overrides.
1364
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1365
+ you can use `@conda_base` to set packages required by all
1366
+ steps and use `@conda` to specify step-specific overrides.
1245
1367
 
1246
1368
  Parameters
1247
1369
  ----------
1248
- packages : Dict[str, str], default: {}
1370
+ packages : Dict[str, str], default {}
1249
1371
  Packages to use for this step. The key is the name of the package
1250
1372
  and the value is the version to use.
1251
- python : str, optional, default: None
1373
+ libraries : Dict[str, str], default {}
1374
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1375
+ python : str, optional, default None
1252
1376
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1253
1377
  that the version used will correspond to the version of the Python interpreter used to start the run.
1378
+ disabled : bool, default False
1379
+ If set to True, disables @conda.
1254
1380
  """
1255
1381
  ...
1256
1382
 
1257
1383
  @typing.overload
1258
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1384
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1259
1385
  ...
1260
1386
 
1261
1387
  @typing.overload
1262
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1388
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1263
1389
  ...
1264
1390
 
1265
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1391
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1266
1392
  """
1267
- Specifies the PyPI packages for the step.
1393
+ Specifies the Conda environment for the step.
1268
1394
 
1269
1395
  Information in this decorator will augment any
1270
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1271
- you can use `@pypi_base` to set packages required by all
1272
- steps and use `@pypi` to specify step-specific overrides.
1396
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1397
+ you can use `@conda_base` to set packages required by all
1398
+ steps and use `@conda` to specify step-specific overrides.
1273
1399
 
1274
1400
  Parameters
1275
1401
  ----------
1276
- packages : Dict[str, str], default: {}
1402
+ packages : Dict[str, str], default {}
1277
1403
  Packages to use for this step. The key is the name of the package
1278
1404
  and the value is the version to use.
1279
- python : str, optional, default: None
1405
+ libraries : Dict[str, str], default {}
1406
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1407
+ python : str, optional, default None
1280
1408
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1281
1409
  that the version used will correspond to the version of the Python interpreter used to start the run.
1410
+ disabled : bool, default False
1411
+ If set to True, disables @conda.
1282
1412
  """
1283
1413
  ...
1284
1414
 
1285
1415
  @typing.overload
1286
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1416
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1287
1417
  """
1288
- Specifies that the step will success under all circumstances.
1289
-
1290
- The decorator will create an optional artifact, specified by `var`, which
1291
- contains the exception raised. You can use it to detect the presence
1292
- of errors, indicating that all happy-path artifacts produced by the step
1293
- are missing.
1418
+ Specifies the times when the flow should be run when running on a
1419
+ production scheduler.
1294
1420
 
1295
1421
  Parameters
1296
1422
  ----------
1297
- var : str, optional, default None
1298
- Name of the artifact in which to store the caught exception.
1299
- If not specified, the exception is not stored.
1300
- print_exception : bool, default True
1301
- Determines whether or not the exception is printed to
1302
- stdout when caught.
1423
+ hourly : bool, default False
1424
+ Run the workflow hourly.
1425
+ daily : bool, default True
1426
+ Run the workflow daily.
1427
+ weekly : bool, default False
1428
+ Run the workflow weekly.
1429
+ cron : str, optional, default None
1430
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1431
+ specified by this expression.
1432
+ timezone : str, optional, default None
1433
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1434
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1303
1435
  """
1304
1436
  ...
1305
1437
 
1306
1438
  @typing.overload
1307
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1439
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1308
1440
  ...
1309
1441
 
1310
- @typing.overload
1311
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1442
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1443
+ """
1444
+ Specifies the times when the flow should be run when running on a
1445
+ production scheduler.
1446
+
1447
+ Parameters
1448
+ ----------
1449
+ hourly : bool, default False
1450
+ Run the workflow hourly.
1451
+ daily : bool, default True
1452
+ Run the workflow daily.
1453
+ weekly : bool, default False
1454
+ Run the workflow weekly.
1455
+ cron : str, optional, default None
1456
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1457
+ specified by this expression.
1458
+ timezone : str, optional, default None
1459
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1460
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1461
+ """
1312
1462
  ...
1313
1463
 
1314
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1464
+ @typing.overload
1465
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1315
1466
  """
1316
- Specifies that the step will success under all circumstances.
1467
+ Specifies the flow(s) that this flow depends on.
1317
1468
 
1318
- The decorator will create an optional artifact, specified by `var`, which
1319
- contains the exception raised. You can use it to detect the presence
1320
- of errors, indicating that all happy-path artifacts produced by the step
1321
- are missing.
1469
+ ```
1470
+ @trigger_on_finish(flow='FooFlow')
1471
+ ```
1472
+ or
1473
+ ```
1474
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1475
+ ```
1476
+ This decorator respects the @project decorator and triggers the flow
1477
+ when upstream runs within the same namespace complete successfully
1478
+
1479
+ Additionally, you can specify project aware upstream flow dependencies
1480
+ by specifying the fully qualified project_flow_name.
1481
+ ```
1482
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1483
+ ```
1484
+ or
1485
+ ```
1486
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1487
+ ```
1488
+
1489
+ You can also specify just the project or project branch (other values will be
1490
+ inferred from the current project or project branch):
1491
+ ```
1492
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1493
+ ```
1494
+
1495
+ Note that `branch` is typically one of:
1496
+ - `prod`
1497
+ - `user.bob`
1498
+ - `test.my_experiment`
1499
+ - `prod.staging`
1322
1500
 
1323
1501
  Parameters
1324
1502
  ----------
1325
- var : str, optional, default None
1326
- Name of the artifact in which to store the caught exception.
1327
- If not specified, the exception is not stored.
1328
- print_exception : bool, default True
1329
- Determines whether or not the exception is printed to
1330
- stdout when caught.
1503
+ flow : Union[str, Dict[str, str]], optional, default None
1504
+ Upstream flow dependency for this flow.
1505
+ flows : List[Union[str, Dict[str, str]]], default []
1506
+ Upstream flow dependencies for this flow.
1507
+ options : Dict[str, Any], default {}
1508
+ Backend-specific configuration for tuning eventing behavior.
1509
+
1510
+
1331
1511
  """
1332
1512
  ...
1333
1513
 
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
 """
- Specifies that this step should execute on Kubernetes.
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`

 Parameters
 ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy : str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory : int, optional
- Shared memory size (in MiB) required for this step.
- port : int, optional
- Port number to specify in the Kubernetes job object.
- compute_pool : str, optional, default None
- Compute pool to be used for this step.
- If not specified, any accessible compute pool within the perimeter is used.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
 """
 ...
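For orientation, here is a minimal sketch of how the flow-level `@trigger_on_finish` decorator documented above can be combined with a step-level resource decorator such as `@kubernetes`. The flow and project names (`FooFlow`, `BarFlow`, `my_project`) are placeholders, and the trigger only takes effect once the flow is deployed to a production orchestrator (for example via `argo-workflows create`).

```
from metaflow import FlowSpec, kubernetes, project, step, trigger_on_finish

@project(name="my_project")              # placeholder project name
@trigger_on_finish(flow="FooFlow")       # run after the (hypothetical) upstream FooFlow finishes
class BarFlow(FlowSpec):

    @kubernetes(cpu=2, memory=8192)      # step-level resources, parameters as documented above
    @step
    def start(self):
        print("upstream FooFlow finished; starting BarFlow")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    BarFlow()
```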

 @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
 """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorator types by _import_plugin_decorators().
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
 """
 ...

 @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorator types by _import_plugin_decorators().
- """
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
 ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
 """
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.

 Parameters
 ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states. (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- Time difference with the previous execution to look at;
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence : bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
 """
 ...
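As a quick illustration of the `@conda_base` / `@conda` split described above, a minimal sketch follows; the package names and pinned versions are placeholders, not recommendations.

```
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python="3.10.4", packages={"pandas": "2.2.2"})   # flow-wide environment (placeholder versions)
class CondaExampleFlow(FlowSpec):

    @conda(packages={"scikit-learn": "1.5.0"})               # step-specific addition on top of @conda_base
    @step
    def start(self):
        import pandas as pd                                  # resolved from the flow-level environment
        self.df = pd.DataFrame({"x": [1, 2, 3]})
        self.next(self.end)

    @step
    def end(self):
        print(self.df.shape)

if __name__ == "__main__":
    CondaExampleFlow()
```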

@@ -1549,52 +1708,45 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
 """
 ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
 """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.

 Parameters
 ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
 """
 ...
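To make the sensor parameters above concrete, here is a minimal sketch; `upstream_dag` and `wait_for_upstream` are placeholder names, the arguments shown simply restate the documented defaults, and any omitted parameters are assumed to fall back to those defaults. The sensor is only attached when the flow is compiled with `airflow create`.

```
from metaflow import FlowSpec, airflow_external_task_sensor, step

# Placeholder DAG id and sensor name; values mirror the defaults documented above.
@airflow_external_task_sensor(
    name="wait_for_upstream",
    external_dag_id="upstream_dag",
    timeout=3600,
    poke_interval=60,
    mode="poke",
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)     # scheduled only after the upstream DAG completes

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```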

@@ -1658,158 +1810,6 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
 """
 ...

- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
-
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
-
-
- """
- ...
-
- @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
-
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
-
-
- """
- ...
-
 @typing.overload
 def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
 """