metaflow-stubs 2.11.9__py2.py3-none-any.whl → 2.11.10__py2.py3-none-any.whl

This diff shows the changes between package versions as they appear in their respective public registries. It covers only publicly released content and is provided for informational purposes.
Files changed (132)
  1. metaflow-stubs/__init__.pyi +541 -539
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +6 -6
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +4 -2
  16. metaflow-stubs/metaflow_current.pyi +16 -16
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +4 -4
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +3 -3
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  59. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  60. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  61. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  63. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  76. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  80. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  81. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  82. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  84. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  87. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  89. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  90. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  91. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  92. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  93. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  95. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  102. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +5 -5
  103. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +5 -3
  105. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  106. metaflow-stubs/plugins/package_cli.pyi +2 -2
  107. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  112. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  114. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  115. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  119. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  121. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  122. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  123. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  124. metaflow-stubs/procpoll.pyi +2 -2
  125. metaflow-stubs/pylint_wrapper.pyi +2 -2
  126. metaflow-stubs/tagging_util.pyi +2 -2
  127. metaflow-stubs/version.pyi +2 -2
  128. {metaflow_stubs-2.11.9.dist-info → metaflow_stubs-2.11.10.dist-info}/METADATA +2 -2
  129. metaflow_stubs-2.11.10.dist-info/RECORD +132 -0
  130. metaflow_stubs-2.11.9.dist-info/RECORD +0 -132
  131. {metaflow_stubs-2.11.9.dist-info → metaflow_stubs-2.11.10.dist-info}/WHEEL +0 -0
  132. {metaflow_stubs-2.11.9.dist-info → metaflow_stubs-2.11.10.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,23 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.11.9 #
- # Generated on 2024-03-29T22:28:00.911417 #
+ # MF version: 2.11.10 #
+ # Generated on 2024-04-12T11:08:34.072022 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow.datastore.inputs
- import metaflow.plugins.datatools.s3.s3
  import metaflow.metaflow_current
- import typing
  import io
- import metaflow.events
+ import metaflow.client.core
  import metaflow.parameters
  import datetime
- import metaflow.client.core
+ import metaflow.events
  import metaflow._vendor.click.types
+ import typing
+ import metaflow.datastore.inputs
+ import metaflow.plugins.datatools.s3.s3
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -726,117 +726,59 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
- """
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Specifies a timeout for your step.

- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ This decorator is useful if this step may hang indefinitely.

+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies a timeout for your step.

- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ This decorator is useful if this step may hang indefinitely.

- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

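The hunk above replaces the `@card`/`@environment`/`@secrets` stubs at this position with the `@timeout` stub. As an illustration of the behavior described in the docstring (durations are added together, and a timeout is treated as an exception that `@retry` and `@catch` can handle), here is a minimal sketch; the flow name, step body, and parameter values are invented for illustration and are not part of the package:

    from metaflow import FlowSpec, step, timeout, retry, catch

    class TimeoutDemoFlow(FlowSpec):

        # Per the docstring, seconds/minutes/hours are summed, so this step
        # is interrupted after 1 hour and 1 minute; the timeout surfaces as an
        # exception that @retry can retry and @catch can record.
        @catch(var="timeout_error")
        @retry(times=1)
        @timeout(hours=1, seconds=60)
        @step
        def start(self):
            # Potentially long-running work would go here.
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        TimeoutDemoFlow()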
@@ -918,198 +860,157 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage: int, default None
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example usage: ["awslogs-group:aws/batch/job"]
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage: int, default None
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example usage: ["awslogs-group:aws/batch/job"]
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

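This hunk replaces the `@batch` and `@pypi` stubs at this position with `@pypi`, `@conda` and `@catch`. The `@conda`/`@conda_base` docstrings describe flow-level packages augmented by step-level overrides; a minimal sketch of that pattern follows, where the flow name, Python version and package pins are illustrative assumptions rather than values from the diff:

    from metaflow import FlowSpec, step, conda, conda_base

    # Flow-level environment shared by all steps (illustrative pins).
    @conda_base(python="3.10.4", packages={"pandas": "2.1.4"})
    class DependencyDemoFlow(FlowSpec):

        # Step-level @conda augments the @conda_base attributes above.
        @conda(packages={"scikit-learn": "1.4.0"})
        @step
        def start(self):
            import sklearn  # resolved from the step-specific environment
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        DependencyDemoFlow()

The same base/override relationship applies to `@pypi_base` and `@pypi` for PyPI-only environments.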
@@ -1129,93 +1030,275 @@ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callabl

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage: int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200)
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example usage: ["awslogs-group:aws/batch/job"]
+ """
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage: int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200)
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example usage: ["awslogs-group:aws/batch/job"]
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Note that you may add multiple `@card` decorators in a step with different parameters.

  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies that the step will success under all circumstances.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Note that you may add multiple `@card` decorators in a step with different parameters.

  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on Kubernetes.

@@ -1269,138 +1352,39 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
1269
1352
  volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1270
1353
  shared_memory: int, optional
1271
1354
  Shared memory size (in MiB) required for this step
1355
+ port: int, optional
1356
+ Port number to specify in the Kubernetes job object
1272
1357
  """
1273
1358
  ...
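
A brief sketch of the new `port` option in context; the resource values and port number are illustrative:

    from metaflow import FlowSpec, kubernetes, step


    class KubernetesPortFlow(FlowSpec):

        # `port` is the parameter added in this version; it is passed through to the Kubernetes job object.
        @kubernetes(cpu=1, memory=4096, port=8080)
        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass


    if __name__ == "__main__":
        KubernetesPortFlow()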
 
  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies environment variables to be set prior to the execution of a step.
 
  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
 
  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Specifies environment variables to be set prior to the execution of a step.
 
  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
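
A short sketch of the `@environment` decorator declared above; the variable name and value are illustrative:

    import os

    from metaflow import FlowSpec, environment, step


    class EnvironmentSketchFlow(FlowSpec):

        # MY_LOG_LEVEL is placed in the step's process environment before the step body runs.
        @environment(vars={"MY_LOG_LEVEL": "DEBUG"})
        @step
        def start(self):
            print(os.environ["MY_LOG_LEVEL"])
            self.next(self.end)

        @step
        def end(self):
            pass


    if __name__ == "__main__":
        EnvironmentSketchFlow()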
 
@@ -1489,143 +1473,80 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  Parameters
  ----------
  event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
-
-
- """
- ...
-
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
  """
  ...
 
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
 
  Parameters
  ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
  name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
  """
  ...
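
A minimal sketch of the flow-level `@project` decorator declared above; the project name is illustrative:

    from metaflow import FlowSpec, project, step


    # All flows deployed with the same project name share a project-specific namespace.
    @project(name="demo_analytics")
    class ProjectSketchFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass


    if __name__ == "__main__":
        ProjectSketchFlow()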
 
  @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
 
  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
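
A minimal sketch of `@schedule` as declared above; the cron expression is illustrative and only takes effect when the flow is deployed to a production scheduler:

    from metaflow import FlowSpec, schedule, step


    # Run every day at 06:00; on Argo Workflows a timezone in IANA format may also be given.
    @schedule(cron="0 6 * * ? *")
    class ScheduleSketchFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass


    if __name__ == "__main__":
        ScheduleSketchFlow()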
 
@@ -1733,51 +1654,90 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  ...
 
  @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
 
  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
 
  @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
 
  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
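
A minimal sketch of the flow-level dependency decorators declared above; the package names and versions are illustrative:

    from metaflow import FlowSpec, conda_base, step


    # Pin Python and common packages for every step; @conda on a step can still override these.
    @conda_base(python="3.10.4", packages={"pandas": "2.1.4"})
    class CondaBaseSketchFlow(FlowSpec):

        @step
        def start(self):
            import pandas as pd
            self.pandas_version = pd.__version__
            self.next(self.end)

        @step
        def end(self):
            pass


    if __name__ == "__main__":
        CondaBaseSketchFlow()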
 
@@ -1823,6 +1783,48 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...
 
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+ wildcard_match : bool
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ a reference to the s3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ """
+ ...
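
A brief sketch of the flow-level `@airflow_s3_key_sensor` decorator added above; it only takes effect when the flow is compiled with `airflow create`, the bucket and key are illustrative, and the remaining parameters are assumed to fall back to the defaults listed in the docstring:

    from metaflow import FlowSpec, airflow_s3_key_sensor, step


    # The generated Airflow DAG waits for the key before the `start` step runs.
    @airflow_s3_key_sensor(
        name="wait_for_input",
        bucket_name="my-bucket",
        bucket_key="incoming/data.csv",
    )
    class S3SensorSketchFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass


    if __name__ == "__main__":
        S3SensorSketchFlow()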
1827
+
1826
1828
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
1827
1829
  """
1828
1830
  Switch namespace to the one provided.