metaflow-stubs 2.11.5__py2.py3-none-any.whl → 2.11.6__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. metaflow-stubs/__init__.pyi +500 -488
  2. metaflow-stubs/cards.pyi +3 -3
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +2 -2
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +17 -17
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +3 -3
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +4 -4
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +4 -4
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +3 -3
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +4 -4
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  59. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  60. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  61. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  63. metaflow-stubs/plugins/cards/card_client.pyi +4 -4
  64. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  76. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  80. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  81. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  82. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  84. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  87. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  89. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  90. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  91. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  92. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  93. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  95. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  103. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  105. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  106. metaflow-stubs/plugins/package_cli.pyi +2 -2
  107. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  112. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  114. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  115. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  119. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  121. metaflow-stubs/plugins/tag_cli.pyi +3 -3
  122. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  123. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  124. metaflow-stubs/procpoll.pyi +2 -2
  125. metaflow-stubs/pylint_wrapper.pyi +2 -2
  126. metaflow-stubs/tagging_util.pyi +2 -2
  127. metaflow-stubs/version.pyi +2 -2
  128. {metaflow_stubs-2.11.5.dist-info → metaflow_stubs-2.11.6.dist-info}/METADATA +2 -2
  129. metaflow_stubs-2.11.6.dist-info/RECORD +132 -0
  130. metaflow_stubs-2.11.5.dist-info/RECORD +0 -132
  131. {metaflow_stubs-2.11.5.dist-info → metaflow_stubs-2.11.6.dist-info}/WHEEL +0 -0
  132. {metaflow_stubs-2.11.5.dist-info → metaflow_stubs-2.11.6.dist-info}/top_level.txt +0 -0
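All of the substantive changes in this release land in metaflow-stubs/__init__.pyi (+500 −488); most other stub files change by only two to four lines apiece, consistent with the regenerated version/timestamp header shown in the first hunk below. The hunks that follow are from that __init__.pyi diff. For orientation, here is a minimal, hypothetical sketch (the flow and file name are illustrative and not part of this package) of how these stubs are consumed: metaflow-stubs is a stub-only companion package installed next to metaflow so that type checkers and IDEs can resolve the FlowSpec and decorator signatures shown in the diff; the .pyi files are not imported at run time.

# hello_flow.py -- hypothetical example; assumes `pip install metaflow metaflow-stubs`
# and a type checker such as mypy (`mypy hello_flow.py`).
from metaflow import FlowSpec, step

class HelloFlow(FlowSpec):
    @step
    def start(self):
        self.message = "hello"   # stored as a Metaflow artifact
        self.next(self.end)

    @step
    def end(self):
        print(self.message)

if __name__ == "__main__":
    HelloFlow()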
@@ -1,7 +1,7 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.11.5 #
- # Generated on 2024-03-14T18:36:24.873925 #
+ # MF version: 2.11.6 #
+ # Generated on 2024-03-23T22:37:23.058784 #
  ##################################################################################

  from __future__ import annotations
@@ -9,15 +9,15 @@ from __future__ import annotations
  import typing
  if typing.TYPE_CHECKING:
  import metaflow.metaflow_current
- import metaflow._vendor.click.types
  import datetime
  import metaflow.events
  import metaflow.parameters
- import metaflow.client.core
+ import typing
  import metaflow.plugins.datatools.s3.s3
- import metaflow.datastore.inputs
  import io
- import typing
+ import metaflow._vendor.click.types
+ import metaflow.datastore.inputs
+ import metaflow.client.core
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -803,172 +803,337 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage: int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200)
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example usage: ["awslogs-group:aws/batch/job"]
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage: int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200)
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example usage: ["awslogs-group:aws/batch/job"]
  """
  ...

- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies that this step should execute on Kubernetes.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ """
+ ...
+
+ @typing.overload
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
+ """
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
  """
  ...

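The hunk above is mostly the stub generator re-emitting the step decorators in a different order: at these positions the old file declared @environment, @secrets, @timeout, and @pypi, while the new file declares @catch, @environment, @batch, @kubernetes, and @card (the displaced decorators reappear further down; see the next hunk). The one signature change visible here is that the @batch stub now carries log_driver and log_options parameters, which the removed 2.11.5 signature in the next hunk lacks. As a hedged illustration of the decorators documented above (flow name and values are made up, and actually running it on AWS Batch assumes a configured Metaflow deployment), a step might combine them like this:

# report_flow.py -- illustrative sketch only; values are not from this package
from metaflow import FlowSpec, step, batch, card, catch, retry

class ReportFlow(FlowSpec):
    @catch(var="compute_error", print_exception=True)  # store any raised exception as an artifact
    @retry(times=3, minutes_between_retries=2)          # retry transient failures
    @batch(cpu=2, memory=8192)                          # run this task on AWS Batch
    @step
    def start(self):
        self.result = 42
        self.next(self.end)

    @card(type="default", timeout=45)                   # attach a Metaflow Card report to this step
    @step
    def end(self):
        print(self.result)

if __name__ == "__main__":
    ReportFlow()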
@@ -1030,456 +1195,233 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1030
1195
  ...
1031
1196
 
1032
1197
  @typing.overload
1033
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1198
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1034
1199
  """
1035
- Creates a human-readable report, a Metaflow Card, after this step completes.
1200
+ Specifies the number of times the task corresponding
1201
+ to a step needs to be retried.
1036
1202
 
1037
- Note that you may add multiple `@card` decorators in a step with different parameters.
1203
+ This decorator is useful for handling transient errors, such as networking issues.
1204
+ If your task contains operations that can't be retried safely, e.g. database updates,
1205
+ it is advisable to annotate it with `@retry(times=0)`.
1206
+
1207
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1208
+ decorator will execute a no-op task after all retries have been exhausted,
1209
+ ensuring that the flow execution can continue.
1038
1210
 
1039
1211
  Parameters
1040
1212
  ----------
1041
- type : str, default 'default'
1042
- Card type.
1043
- id : str, optional, default None
1044
- If multiple cards are present, use this id to identify this card.
1045
- options : Dict[str, Any], default {}
1046
- Options passed to the card. The contents depend on the card type.
1047
- timeout : int, default 45
1048
- Interrupt reporting if it takes more than this many seconds.
1049
-
1050
-
1213
+ times : int, default 3
1214
+ Number of times to retry this task.
1215
+ minutes_between_retries : int, default 2
1216
+ Number of minutes between retries.
1051
1217
  """
1052
1218
  ...
1053
1219
 
1054
1220
  @typing.overload
1055
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1221
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1056
1222
  ...
1057
1223
 
1058
1224
  @typing.overload
1059
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1225
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1060
1226
  ...
1061
1227
 
1062
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1228
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1063
1229
  """
1064
- Creates a human-readable report, a Metaflow Card, after this step completes.
1065
-
1066
- Note that you may add multiple `@card` decorators in a step with different parameters.
1067
-
1068
- Parameters
1069
- ----------
1070
- type : str, default 'default'
1071
- Card type.
1072
- id : str, optional, default None
1073
- If multiple cards are present, use this id to identify this card.
1074
- options : Dict[str, Any], default {}
1075
- Options passed to the card. The contents depend on the card type.
1076
- timeout : int, default 45
1077
- Interrupt reporting if it takes more than this many seconds.
1078
-
1230
+ Specifies the number of times the task corresponding
1231
+ to a step needs to be retried.
1079
1232
 
1080
- """
1081
- ...
1082
-
1083
- @typing.overload
1084
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1085
- """
1086
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1233
+ This decorator is useful for handling transient errors, such as networking issues.
1234
+ If your task contains operations that can't be retried safely, e.g. database updates,
1235
+ it is advisable to annotate it with `@retry(times=0)`.
1087
1236
 
1088
- Parameters
1089
- ----------
1090
- cpu : int, default 1
1091
- Number of CPUs required for this step. If `@resources` is
1092
- also present, the maximum value from all decorators is used.
1093
- gpu : int, default 0
1094
- Number of GPUs required for this step. If `@resources` is
1095
- also present, the maximum value from all decorators is used.
1096
- memory : int, default 4096
1097
- Memory size (in MB) required for this step. If
1098
- `@resources` is also present, the maximum value from all decorators is
1099
- used.
1100
- image : str, optional, default None
1101
- Docker image to use when launching on AWS Batch. If not specified, and
1102
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1103
- not, a default Docker image mapping to the current version of Python is used.
1104
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
1105
- AWS Batch Job Queue to submit the job to.
1106
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1107
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1108
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1109
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1110
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1111
- shared_memory : int, optional, default None
1112
- The value for the size (in MiB) of the /dev/shm volume for this step.
1113
- This parameter maps to the `--shm-size` option in Docker.
1114
- max_swap : int, optional, default None
1115
- The total amount of swap memory (in MiB) a container can use for this
1116
- step. This parameter is translated to the `--memory-swap` option in
1117
- Docker where the value is the sum of the container memory plus the
1118
- `max_swap` value.
1119
- swappiness : int, optional, default None
1120
- This allows you to tune memory swappiness behavior for this step.
1121
- A swappiness value of 0 causes swapping not to happen unless absolutely
1122
- necessary. A swappiness value of 100 causes pages to be swapped very
1123
- aggressively. Accepted values are whole numbers between 0 and 100.
1124
- use_tmpfs : bool, default False
1125
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
1126
- not available on Fargate compute environments
1127
- tmpfs_tempdir : bool, default True
1128
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1129
- tmpfs_size : int, optional, default None
1130
- The value for the size (in MiB) of the tmpfs mount for this step.
1131
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1132
- memory allocated for this step.
1133
- tmpfs_path : str, optional, default None
1134
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1135
- inferentia : int, default 0
1136
- Number of Inferentia chips required for this step.
1137
- trainium : int, default None
1138
- Alias for inferentia. Use only one of the two.
1139
- efa : int, default 0
1140
- Number of elastic fabric adapter network devices to attach to container
1141
- ephemeral_storage: int, default None
1142
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
1143
- This is only relevant for Fargate compute environments
1144
- """
1145
- ...
1146
-
1147
- @typing.overload
1148
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1149
- ...
1150
-
1151
- @typing.overload
1152
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1153
- ...
1154
-
1155
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None):
1156
- """
1157
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1237
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1238
+ decorator will execute a no-op task after all retries have been exhausted,
1239
+ ensuring that the flow execution can continue.
1158
1240
 
1159
1241
  Parameters
1160
1242
  ----------
1161
- cpu : int, default 1
1162
- Number of CPUs required for this step. If `@resources` is
1163
- also present, the maximum value from all decorators is used.
1164
- gpu : int, default 0
1165
- Number of GPUs required for this step. If `@resources` is
1166
- also present, the maximum value from all decorators is used.
1167
- memory : int, default 4096
1168
- Memory size (in MB) required for this step. If
1169
- `@resources` is also present, the maximum value from all decorators is
1170
- used.
1171
- image : str, optional, default None
1172
- Docker image to use when launching on AWS Batch. If not specified, and
1173
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1174
- not, a default Docker image mapping to the current version of Python is used.
1175
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
1176
- AWS Batch Job Queue to submit the job to.
1177
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1178
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1179
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1180
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1181
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1182
- shared_memory : int, optional, default None
1183
- The value for the size (in MiB) of the /dev/shm volume for this step.
1184
- This parameter maps to the `--shm-size` option in Docker.
1185
- max_swap : int, optional, default None
1186
- The total amount of swap memory (in MiB) a container can use for this
1187
- step. This parameter is translated to the `--memory-swap` option in
1188
- Docker where the value is the sum of the container memory plus the
1189
- `max_swap` value.
1190
- swappiness : int, optional, default None
1191
- This allows you to tune memory swappiness behavior for this step.
1192
- A swappiness value of 0 causes swapping not to happen unless absolutely
1193
- necessary. A swappiness value of 100 causes pages to be swapped very
1194
- aggressively. Accepted values are whole numbers between 0 and 100.
1195
- use_tmpfs : bool, default False
1196
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
1197
- not available on Fargate compute environments
1198
- tmpfs_tempdir : bool, default True
1199
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1200
- tmpfs_size : int, optional, default None
1201
- The value for the size (in MiB) of the tmpfs mount for this step.
1202
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1203
- memory allocated for this step.
1204
- tmpfs_path : str, optional, default None
1205
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1206
- inferentia : int, default 0
1207
- Number of Inferentia chips required for this step.
1208
- trainium : int, default None
1209
- Alias for inferentia. Use only one of the two.
1210
- efa : int, default 0
1211
- Number of elastic fabric adapter network devices to attach to container
1212
- ephemeral_storage: int, default None
1213
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
1214
- This is only relevant for Fargate compute environments
1243
+ times : int, default 3
1244
+ Number of times to retry this task.
1245
+ minutes_between_retries : int, default 2
1246
+ Number of minutes between retries.
1215
1247
  """
1216
1248
  ...
1217
1249
 
1218
1250
  @typing.overload
1219
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1251
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1220
1252
  """
1221
- Specifies that the step will success under all circumstances.
1253
+ Specifies the PyPI packages for the step.
1222
1254
 
1223
- The decorator will create an optional artifact, specified by `var`, which
1224
- contains the exception raised. You can use it to detect the presence
1225
- of errors, indicating that all happy-path artifacts produced by the step
1226
- are missing.
1255
+ Information in this decorator will augment any
1256
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1257
+ you can use `@pypi_base` to set packages required by all
1258
+ steps and use `@pypi` to specify step-specific overrides.
1227
1259
 
1228
1260
  Parameters
1229
1261
  ----------
1230
- var : str, optional, default None
1231
- Name of the artifact in which to store the caught exception.
1232
- If not specified, the exception is not stored.
1233
- print_exception : bool, default True
1234
- Determines whether or not the exception is printed to
1235
- stdout when caught.
1262
+ packages : Dict[str, str], default: {}
1263
+ Packages to use for this step. The key is the name of the package
1264
+ and the value is the version to use.
1265
+ python : str, optional, default: None
1266
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1267
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1236
1268
  """
1237
1269
  ...
1238
1270
 
1239
1271
  @typing.overload
1240
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1272
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1241
1273
  ...
1242
1274
 
1243
1275
  @typing.overload
1244
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1276
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1245
1277
  ...
1246
1278
 
1247
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1279
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1248
1280
  """
1249
- Specifies that the step will success under all circumstances.
1250
-
1251
- The decorator will create an optional artifact, specified by `var`, which
1252
- contains the exception raised. You can use it to detect the presence
1253
- of errors, indicating that all happy-path artifacts produced by the step
1254
- are missing.
1281
+ Specifies the PyPI packages for the step.
1255
1282
 
1256
- Parameters
1257
- ----------
1258
- var : str, optional, default None
1259
- Name of the artifact in which to store the caught exception.
1260
- If not specified, the exception is not stored.
1261
- print_exception : bool, default True
1262
- Determines whether or not the exception is printed to
1263
- stdout when caught.
1264
- """
1265
- ...
1266
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
+ Information in this decorator will augment any
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
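To make the new step-level `@pypi` decorator concrete, here is a minimal usage sketch; the flow name, package pins, and step bodies are illustrative assumptions, not taken from this diff:

from metaflow import FlowSpec, step, pypi

class PyPIStepFlow(FlowSpec):

    # the pinned versions below are arbitrary examples
    @pypi(packages={"pandas": "2.1.4"}, python="3.10.9")
    @step
    def start(self):
        import pandas as pd  # resolved inside the step-specific PyPI environment
        self.rows = len(pd.DataFrame({"a": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print("rows:", self.rows)

if __name__ == "__main__":
    PyPIStepFlow()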
 
  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies a timeout for your step.
 
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ This decorator is useful if this step may hang indefinitely.
 
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
 
  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...
 
  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies a timeout for your step.
 
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ This decorator is useful if this step may hang indefinitely.
 
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
 
  Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...
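A brief sketch of how `@timeout` composes with `@retry` and `@catch` as described in the docstring above; the step body and time budget are illustrative assumptions:

from metaflow import FlowSpec, step, timeout, retry, catch

class TimeoutFlow(FlowSpec):

    @catch(var="timeout_error")    # records the exception if every attempt times out
    @retry(times=2)                # a timeout is retried like any other exception
    @timeout(hours=1, minutes=1)   # seconds/minutes/hours are summed: 1h01m total
    @step
    def start(self):
        # stand-in for work that could hang; the decorators above bound it
        self.total = sum(range(1_000_000))
        self.next(self.end)

    @step
    def end(self):
        print("caught:", getattr(self, "timeout_error", None))

if __name__ == "__main__":
    TimeoutFlow()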
 
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
 
  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...
 
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
 
  Parameters
  ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...
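A minimal sketch of the step-level `@secrets` decorator; the secret id and environment variable name are hypothetical and depend on the configured secrets backend:

import os

from metaflow import FlowSpec, step, secrets

class SecretsFlow(FlowSpec):

    @secrets(sources=["db-credentials"])  # hypothetical secret spec
    @step
    def start(self):
        # keys stored in the secret are injected as environment variables
        print("DB_USER present:", "DB_USER" in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsFlow()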
 
  @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
  """
  ...
 
  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
  """
  ...
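A usage sketch showing the flow-level `@pypi_base` with a step-specific `@pypi` override, per the docstring above; package pins and the flow itself are illustrative assumptions:

from metaflow import FlowSpec, step, pypi, pypi_base

@pypi_base(packages={"requests": "2.31.0"}, python="3.10.9")
class PyPIBaseFlow(FlowSpec):

    @step
    def start(self):
        import requests  # available in every step via @pypi_base
        self.ok = requests.codes.ok
        self.next(self.train)

    @pypi(packages={"scikit-learn": "1.3.2"})  # augments the base packages for this step only
    @step
    def train(self):
        import sklearn  # only this step resolves this dependency
        self.next(self.end)

    @step
    def end(self):
        print(self.ok)

if __name__ == "__main__":
    PyPIBaseFlow()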
 
@@ -1532,6 +1474,48 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...
 
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states, (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ time difference with the previous execution to look at,
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
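A sketch of the sensor decorator in use; the DAG and task ids are placeholders, and the remaining parameters are assumed to fall back to the defaults listed in the docstring:

from metaflow import FlowSpec, step, airflow_external_task_sensor

@airflow_external_task_sensor(
    external_dag_id="upstream_dag",         # placeholder Airflow DAG id
    external_task_ids=["publish_dataset"],  # placeholder task id
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        # when deployed with `airflow create`, this step waits for the sensor
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()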
+
  @typing.overload
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1627,45 +1611,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...
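For context, a minimal sketch of `@trigger`; the event name is a placeholder, and the decorator takes effect when the flow is deployed to an event-capable orchestrator such as Argo Workflows:

from metaflow import FlowSpec, step, trigger

@trigger(event="data_updated")  # placeholder event name
class EventDrivenFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EventDrivenFlow()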
 
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1708,6 +1653,73 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...
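A sketch of the S3 key sensor; the bucket and key are placeholders, and the other parameters are assumed to use their documented defaults:

from metaflow import FlowSpec, step, airflow_s3_key_sensor

@airflow_s3_key_sensor(
    bucket_name="example-bucket",          # placeholder bucket
    bucket_key="exports/latest/_SUCCESS",  # placeholder key to wait for
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3GatedFlow()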
 
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
+ """
+ ...
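A minimal sketch of `@project`; the project name is an illustrative placeholder:

from metaflow import FlowSpec, step, project

@project(name="fraud_detection")  # placeholder project name
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScoringFlow()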
+
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
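A usage sketch of the flow-level `@conda_base` decorator; the package pins and Python version are illustrative assumptions:

from metaflow import FlowSpec, step, conda_base

@conda_base(packages={"numpy": "1.26.4"}, python="3.10.12")
class CondaBaseFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # resolved from the shared Conda environment
        self.mean = float(np.mean([1.0, 2.0, 3.0]))
        self.next(self.end)

    @step
    def end(self):
        print(self.mean)

if __name__ == "__main__":
    CondaBaseFlow()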
+
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """