metaflow-stubs 2.11.10__py2.py3-none-any.whl → 2.11.11__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (132)
  1. metaflow-stubs/__init__.pyi +495 -495
  2. metaflow-stubs/cards.pyi +4 -4
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +5 -5
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +3 -3
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +4 -2
  16. metaflow-stubs/metaflow_current.pyi +18 -18
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  59. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  60. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  61. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  63. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  64. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  76. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  80. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  81. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  82. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  83. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  84. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  87. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  89. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  90. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  91. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  92. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  93. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
  95. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  102. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  103. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  105. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  106. metaflow-stubs/plugins/package_cli.pyi +2 -2
  107. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  112. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  114. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  115. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  119. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  121. metaflow-stubs/plugins/tag_cli.pyi +3 -3
  122. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  123. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  124. metaflow-stubs/procpoll.pyi +2 -2
  125. metaflow-stubs/pylint_wrapper.pyi +2 -2
  126. metaflow-stubs/tagging_util.pyi +2 -2
  127. metaflow-stubs/version.pyi +2 -2
  128. {metaflow_stubs-2.11.10.dist-info → metaflow_stubs-2.11.11.dist-info}/METADATA +2 -2
  129. metaflow_stubs-2.11.11.dist-info/RECORD +132 -0
  130. metaflow_stubs-2.11.10.dist-info/RECORD +0 -132
  131. {metaflow_stubs-2.11.10.dist-info → metaflow_stubs-2.11.11.dist-info}/WHEEL +0 -0
  132. {metaflow_stubs-2.11.10.dist-info → metaflow_stubs-2.11.11.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,23 @@
 ##################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.11.10 #
-# Generated on 2024-04-12T11:08:34.072022 #
+# MF version: 2.11.11 #
+# Generated on 2024-05-02T22:04:13.800222 #
 ##################################################################################
 
 from __future__ import annotations
 
 import typing
 if typing.TYPE_CHECKING:
-    import metaflow.metaflow_current
-    import io
     import metaflow.client.core
-    import metaflow.parameters
     import datetime
-    import metaflow.events
     import metaflow._vendor.click.types
     import typing
-    import metaflow.datastore.inputs
     import metaflow.plugins.datatools.s3.s3
+    import metaflow.datastore.inputs
+    import metaflow.metaflow_current
+    import metaflow.parameters
+    import io
+    import metaflow.events
 
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
 
@@ -726,59 +726,198 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...
 
 @typing.overload
-def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+    Specifies the PyPI packages for the step.
 
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    Information in this decorator will augment any
+    attributes set in the `@pypi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
 
     Parameters
     ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...
 
 @typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-    Specifies a timeout for your step.
+    Specifies the PyPI packages for the step.
 
-    This decorator is useful if this step may hang indefinitely.
+    Information in this decorator will augment any
+    attributes set in the `@pypi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
 
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
+@typing.overload
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
 
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task (21-200)
+        This is only relevant for Fargate compute environments
+    log_driver : str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options : List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example usage: ["awslogs-group:aws/batch/job"]
+    """
+    ...
+
+@typing.overload
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+    """
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
 
     Parameters
     ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task (21-200)
+        This is only relevant for Fargate compute environments
+    log_driver : str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options : List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example usage: ["awslogs-group:aws/batch/job"]
     """
     ...
 
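The hunk above relocates the `@pypi` step decorator within the stub and adds a full stub for `@batch`. For orientation, here is a minimal sketch of how the two decorators compose in a flow; the flow name, package pins, and resource sizes are illustrative assumptions, not taken from this diff.

from metaflow import FlowSpec, batch, pypi, step


class ExampleFlow(FlowSpec):

    @pypi(packages={"pandas": "2.2.1"}, python="3.11.0")  # hypothetical pins
    @step
    def start(self):
        # pandas is available only inside this step's isolated environment.
        import pandas as pd
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.train)

    @batch(cpu=4, memory=16384)  # runs this step remotely on AWS Batch
    @step
    def train(self):
        self.result = self.rows * 2
        self.next(self.end)

    @step
    def end(self):
        print("result:", self.result)


if __name__ == "__main__":
    ExampleFlow()

Stacking `@pypi` on a step keeps its dependency pins next to the code that needs them, while `@batch` changes only where the step runs; per the docstrings, any overlapping `@resources` values are reconciled by taking the maximum.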
@@ -860,157 +999,110 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
     ...
 
 @typing.overload
-def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the PyPI packages for the step.
+    Creates a human-readable report, a Metaflow Card, after this step completes.
 
-    Information in this decorator will augment any
-    attributes set in the `@pypi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    Note that you may add multiple `@card` decorators in a step with different parameters.
 
     Parameters
     ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
+
+
     """
     ...
 
 @typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
    ...
 
 @typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
     """
-    Specifies the PyPI packages for the step.
+    Creates a human-readable report, a Metaflow Card, after this step completes.
 
-    Information in this decorator will augment any
-    attributes set in the `@pypi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    Note that you may add multiple `@card` decorators in a step with different parameters.
 
     Parameters
     ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
-@typing.overload
-def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies the Conda environment for the step.
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
 
-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
 
-    Parameters
-    ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
     """
     ...
 
 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the Conda environment for the step.
+    Specifies a timeout for your step.
 
-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    This decorator is useful if this step may hang indefinitely.
 
-    Parameters
-    ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
-    """
-    ...
-
-@typing.overload
-def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that the step will succeed under all circumstances.
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
 
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
 
     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
     """
     ...
 
 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
     """
-    Specifies that the step will succeed under all circumstances.
+    Specifies a timeout for your step.
 
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
 
     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
     """
     ...
 
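This hunk swaps the stub positions of `@card` and `@timeout` without changing their contracts. As a rough usage sketch of how they compose with `@retry`, per the docstrings above (the card content, timeout, and retry count are illustrative assumptions):

from metaflow import FlowSpec, card, current, retry, step, timeout
from metaflow.cards import Markdown


class ReportFlow(FlowSpec):

    @card(type="default", timeout=60)  # interrupt card rendering after 60s
    @timeout(minutes=5)                # a hang surfaces as a step exception...
    @retry(times=2)                    # ...which @retry can then retry
    @step
    def start(self):
        # Components appended here are rendered into the card when the step completes.
        current.card.append(Markdown("# Step report"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ReportFlow()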
@@ -1068,149 +1160,108 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...
 
 @typing.overload
-def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Specifies that the step will succeed under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task (21-200)
-        This is only relevant for Fargate compute environments
-    log_driver : str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options : List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example usage: ["awslogs-group:aws/batch/job"]
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...
 
 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
     """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Specifies that the step will succeed under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task (21-200)
-        This is only relevant for Fargate compute environments
-    log_driver : str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options : List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example usage: ["awslogs-group:aws/batch/job"]
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
+    """
+    ...
+
+@typing.overload
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
+    """
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...
 
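Here `@catch` and `@conda` likewise trade places in the stub. A minimal sketch of the documented behavior, with a hypothetical artifact name (`failure`) and hypothetical package pins:

from metaflow import FlowSpec, catch, conda, step


class RobustFlow(FlowSpec):

    @catch(var="failure")  # on error, the caught exception lands in self.failure
    @conda(packages={"numpy": "1.26.4"}, python="3.10.13")  # hypothetical pins
    @step
    def start(self):
        import numpy as np
        self.value = float(np.mean([1.0, 2.0, 3.0]))
        self.next(self.end)

    @step
    def end(self):
        # Happy-path artifacts may be missing if start failed, so check first.
        if getattr(self, "failure", None) is not None:
            print("start failed:", self.failure)
        else:
            print("value:", self.value)


if __name__ == "__main__":
    RobustFlow()

Because `@catch` lets the step complete even on error, downstream steps should inspect the `var` artifact before relying on the step's other artifacts.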
@@ -1248,53 +1299,33 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     ...
 
 @typing.overload
-def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
-
-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    Specifies environment variables to be set prior to the execution of a step.
 
     Parameters
     ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
-
-
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
     ...
 
 @typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
    ...
 
-def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
     """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
-
-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    Specifies environment variables to be set prior to the execution of a step.
 
     Parameters
     ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
-
-
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
     ...
 
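The hunk above replaces the `@card` stub at this position with `@environment`. A short sketch of the documented contract (the variable name and value are illustrative):

import os

from metaflow import FlowSpec, environment, step


class EnvFlow(FlowSpec):

    @environment(vars={"MODEL_STAGE": "staging"})  # set before the step executes
    @step
    def start(self):
        # The injected variable is visible through os.environ inside the step.
        print("stage:", os.environ["MODEL_STAGE"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvFlow()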
@@ -1358,33 +1389,150 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
      ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
      """
-     Specifies environment variables to be set prior to the execution of a step.
+     Specifies the PyPI packages for all steps of the flow.
+
+     Use `@pypi_base` to set common packages required by all
+     steps and use `@pypi` to specify step-specific overrides.
+
+     Parameters
+     ----------
+     packages : Dict[str, str], default: {}
+         Packages to use for this flow. The key is the name of the package
+         and the value is the version to use.
+     python : str, optional, default: None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     """
+     ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+     ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+     """
+     Specifies the PyPI packages for all steps of the flow.
+
+     Use `@pypi_base` to set common packages required by all
+     steps and use `@pypi` to specify step-specific overrides.
+
+     Parameters
+     ----------
+     packages : Dict[str, str], default: {}
+         Packages to use for this flow. The key is the name of the package
+         and the value is the version to use.
+     python : str, optional, default: None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     """
+     ...
+
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+     before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+     and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+     added as a flow decorator. Adding more than one ensures that the `start` step
+     starts only after all sensors finish.
+
+     Parameters
+     ----------
+     timeout : int
+         Time, in seconds, before the task times out and fails. (Default: 3600)
+     poke_interval : int
+         Time, in seconds, that the job should wait between each try. (Default: 60)
+     mode : str
+         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+     exponential_backoff : bool
+         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+     pool : str
+         The slot pool this task should run in; slot pools are a way to
+         limit concurrency for certain tasks. (Default: None)
+     soft_fail : bool
+         Set to True to mark the task as SKIPPED on failure. (Default: False)
+     name : str
+         Name of the sensor on Airflow.
+     description : str
+         Description of the sensor in the Airflow UI.
+     bucket_key : Union[str, List[str]]
+         The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
+         When specified as a full s3:// URL, leave `bucket_name` as None.
+     bucket_name : str
+         Name of the S3 bucket. Only needed when `bucket_key` is not provided as a full s3:// URL.
+         When specified, all the keys passed to `bucket_key` refer to this bucket. (Default: None)
+     wildcard_match : bool
+         Whether the `bucket_key` should be interpreted as a Unix wildcard pattern. (Default: False)
+     aws_conn_id : str
+         A reference to the S3 connection on Airflow. (Default: None)
+     verify : bool
+         Whether or not to verify SSL certificates for the S3 connection. (Default: None)
+     """
+     ...
+
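
A sketch of attaching the S3 key sensor to a flow; the sensor name and S3 key are hypothetical, the omitted parameters are assumed to take the documented defaults, and the decorator only takes effect when the flow is compiled with `airflow create`:

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


@airflow_s3_key_sensor(
    name="wait_for_input_file",                  # hypothetical sensor name
    bucket_key="s3://my-bucket/input/data.csv",  # hypothetical key; full URL, so bucket_name stays unset
)
class S3SensorDemoFlow(FlowSpec):

    @step
    def start(self):
        # On Airflow, this step is gated on the sensor above succeeding.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3SensorDemoFlow()
```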
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     Specifies what flows belong to the same project.
+
+     A project-specific namespace is created for all flows that
+     use the same `@project(name)`.
+
+     Parameters
+     ----------
+     name : str
+         Project name. Make sure that the name is unique amongst all
+         projects that use the same production scheduler. The name may
+         contain only lowercase alphanumeric characters and underscores.
+     """
+     ...
+
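
A minimal sketch of the `@project` decorator; the project name is illustrative:

```python
from metaflow import FlowSpec, project, step


@project(name="demo_analytics")  # lowercase alphanumerics and underscores only
class ProjectDemoFlow(FlowSpec):

    @step
    def start(self):
        # Deployments of this flow are namespaced under the project above.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectDemoFlow()
```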
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     Specifies the times when the flow should be run when running on a
+     production scheduler.

      Parameters
      ----------
-     vars : Dict[str, str], default {}
-         Dictionary of environment variables to set.
+     hourly : bool, default False
+         Run the workflow hourly.
+     daily : bool, default True
+         Run the workflow daily.
+     weekly : bool, default False
+         Run the workflow weekly.
+     cron : str, optional, default None
+         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+         specified by this expression.
+     timezone : str, optional, default None
+         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+         which accept timezones in [IANA format](https://nodatime.org/TimeZones).
      """
      ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-     ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
      ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
      """
-     Specifies environment variables to be set prior to the execution of a step.
+     Specifies the times when the flow should be run when running on a
+     production scheduler.

      Parameters
      ----------
-     vars : Dict[str, str], default {}
-         Dictionary of environment variables to set.
+     hourly : bool, default False
+         Run the workflow hourly.
+     daily : bool, default True
+         Run the workflow daily.
+     weekly : bool, default False
+         Run the workflow weekly.
+     cron : str, optional, default None
+         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+         specified by this expression.
+     timezone : str, optional, default None
+         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+         which accept timezones in [IANA format](https://nodatime.org/TimeZones).
      """
      ...
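
A sketch of `@schedule` on a flow; the cron expression (in the AWS EventBridge six-field syntax linked above) is illustrative, and the schedule only applies once the flow is deployed to a production scheduler:

```python
from metaflow import FlowSpec, schedule, step


@schedule(cron="0 10 * * ? *")  # illustrative: daily at 10:00 UTC on the production scheduler
class DailyDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DailyDemoFlow()
```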
 
@@ -1483,70 +1631,45 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
      """
      ...

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
      """
-     Specifies what flows belong to the same project.
-
-     A project-specific namespace is created for all flows that
-     use the same `@project(name)`.
+     The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+     This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one ensures that the `start` step starts only after all sensors finish.

      Parameters
      ----------
+     timeout : int
+         Time, in seconds, before the task times out and fails. (Default: 3600)
+     poke_interval : int
+         Time, in seconds, that the job should wait between each try. (Default: 60)
+     mode : str
+         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+     exponential_backoff : bool
+         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+     pool : str
+         The slot pool this task should run in; slot pools are a way to
+         limit concurrency for certain tasks. (Default: None)
+     soft_fail : bool
+         Set to True to mark the task as SKIPPED on failure. (Default: False)
      name : str
-         Project name. Make sure that the name is unique amongst all
-         projects that use the same production scheduler. The name may
-         contain only lowercase alphanumeric characters and underscores.
-
-
-     """
-     ...
-
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     Specifies the times when the flow should be run when running on a
-     production scheduler.
-
-     Parameters
-     ----------
-     hourly : bool, default False
-         Run the workflow hourly.
-     daily : bool, default True
-         Run the workflow daily.
-     weekly : bool, default False
-         Run the workflow weekly.
-     cron : str, optional, default None
-         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-         specified by this expression.
-     timezone : str, optional, default None
-         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-         which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-     """
-     ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-     ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
-     """
-     Specifies the times when the flow should be run when running on a
-     production scheduler.
-
-     Parameters
-     ----------
-     hourly : bool, default False
-         Run the workflow hourly.
-     daily : bool, default True
-         Run the workflow daily.
-     weekly : bool, default False
-         Run the workflow weekly.
-     cron : str, optional, default None
-         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-         specified by this expression.
-     timezone : str, optional, default None
-         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-         which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+         Name of the sensor on Airflow.
+     description : str
+         Description of the sensor in the Airflow UI.
+     external_dag_id : str
+         The dag_id that contains the task you want to wait for.
+     external_task_ids : List[str]
+         The list of task_ids that you want to wait for.
+         If None (default value), the sensor waits for the DAG. (Default: None)
+     allowed_states : List[str]
+         Iterable of allowed states. (Default: ['success'])
+     failed_states : List[str]
+         Iterable of failed or disallowed states. (Default: None)
+     execution_delta : datetime.timedelta
+         Time difference with the previous execution to look at;
+         the default is the same logical date as the current task or DAG. (Default: None)
+     check_existence : bool
+         Set to True to check if the external task exists or check if
+         the DAG to wait for exists. (Default: True)
      """
      ...
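
A corresponding sketch for the external-task sensor, mirroring the S3 sensor example earlier; the DAG id and sensor name are hypothetical, and the omitted parameters are assumed to take the documented defaults:

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step


@airflow_external_task_sensor(
    name="wait_for_upstream_dag",    # hypothetical sensor name
    external_dag_id="upstream_etl",  # hypothetical DAG to wait for
)
class ExternalSensorDemoFlow(FlowSpec):

    @step
    def start(self):
        # On Airflow, this step starts only after the upstream DAG run completes.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ExternalSensorDemoFlow()
```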
 
@@ -1702,129 +1825,6 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
      """
      ...

- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     Specifies the PyPI packages for all steps of the flow.
-
-     Use `@pypi_base` to set common packages required by all
-     steps and use `@pypi` to specify step-specific overrides.
-     Parameters
-     ----------
-     packages : Dict[str, str], default: {}
-         Packages to use for this flow. The key is the name of the package
-         and the value is the version to use.
-     python : str, optional, default: None
-         Version of Python to use, e.g. '3.7.4'. A default value of None implies
-         that the version used will correspond to the version of the Python interpreter used to start the run.
-     """
-     ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-     ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
-     """
-     Specifies the PyPI packages for all steps of the flow.
-
-     Use `@pypi_base` to set common packages required by all
-     steps and use `@pypi` to specify step-specific overrides.
-     Parameters
-     ----------
-     packages : Dict[str, str], default: {}
-         Packages to use for this flow. The key is the name of the package
-         and the value is the version to use.
-     python : str, optional, default: None
-         Version of Python to use, e.g. '3.7.4'. A default value of None implies
-         that the version used will correspond to the version of the Python interpreter used to start the run.
-     """
-     ...
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-     This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
-     Parameters
-     ----------
-     timeout : int
-         Time, in seconds before the task times out and fails. (Default: 3600)
-     poke_interval : int
-         Time in seconds that the job should wait in between each try. (Default: 60)
-     mode : str
-         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-     exponential_backoff : bool
-         allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-     pool : str
-         the slot pool this task should run in,
-         slot pools are a way to limit concurrency for certain tasks. (Default:None)
-     soft_fail : bool
-         Set to true to mark the task as SKIPPED on failure. (Default: False)
-     name : str
-         Name of the sensor on Airflow
-     description : str
-         Description of sensor in the Airflow UI
-     external_dag_id : str
-         The dag_id that contains the task you want to wait for.
-     external_task_ids : List[str]
-         The list of task_ids that you want to wait for.
-         If None (default value) the sensor waits for the DAG. (Default: None)
-     allowed_states : List[str]
-         Iterable of allowed states, (Default: ['success'])
-     failed_states : List[str]
-         Iterable of failed or dis-allowed states. (Default: None)
-     execution_delta : datetime.timedelta
-         time difference with the previous execution to look at,
-         the default is the same logical date as the current task or DAG. (Default: None)
-     check_existence: bool
-         Set to True to check if the external task exists or check if
-         the DAG to wait for exists. (Default: True)
-     """
-     ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-     before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-     and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-     added as a flow decorators. Adding more than one decorator will ensure that `start` step
-     starts only after all sensors finish.
-
-     Parameters
-     ----------
-     timeout : int
-         Time, in seconds before the task times out and fails. (Default: 3600)
-     poke_interval : int
-         Time in seconds that the job should wait in between each try. (Default: 60)
-     mode : str
-         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-     exponential_backoff : bool
-         allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-     pool : str
-         the slot pool this task should run in,
-         slot pools are a way to limit concurrency for certain tasks. (Default:None)
-     soft_fail : bool
-         Set to true to mark the task as SKIPPED on failure. (Default: False)
-     name : str
-         Name of the sensor on Airflow
-     description : str
-         Description of sensor in the Airflow UI
-     bucket_key : Union[str, List[str]]
-         The key(s) being waited on. Supports full s3:// style url or relative path from root level.
-         When it's specified as a full s3:// url, please leave `bucket_name` as None
-     bucket_name : str
-         Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
-         When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
-     wildcard_match : bool
-         whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
-     aws_conn_id : str
-         a reference to the s3 connection on Airflow. (Default: None)
-     verify : bool
-         Whether or not to verify SSL certificates for S3 connection. (Default: None)
-     """
-     ...
-
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
      """
      Switch namespace to the one provided.