metaflow-stubs 2.12.20__py2.py3-none-any.whl → 2.12.22__py2.py3-none-any.whl

This diff shows the contents of two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the registry.
Files changed (152)
  1. metaflow-stubs/__init__.pyi +469 -473
  2. metaflow-stubs/cards.pyi +4 -4
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/info_file.pyi +16 -0
  14. metaflow-stubs/metadata/metadata.pyi +3 -3
  15. metaflow-stubs/metadata/util.pyi +2 -2
  16. metaflow-stubs/metaflow_config.pyi +2 -2
  17. metaflow-stubs/metaflow_current.pyi +23 -23
  18. metaflow-stubs/mflog/mflog.pyi +2 -2
  19. metaflow-stubs/multicore_utils.pyi +2 -2
  20. metaflow-stubs/parameters.pyi +3 -3
  21. metaflow-stubs/plugins/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +6 -6
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +5 -5
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  39. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  47. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  49. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  59. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  62. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  63. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  64. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  65. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  66. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  68. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/basic.pyi +4 -4
  74. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  100. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  102. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  109. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  112. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  114. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  115. metaflow-stubs/plugins/package_cli.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  121. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  124. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  128. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  130. metaflow-stubs/plugins/tag_cli.pyi +5 -5
  131. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  133. metaflow-stubs/procpoll.pyi +2 -2
  134. metaflow-stubs/pylint_wrapper.pyi +2 -2
  135. metaflow-stubs/runner/__init__.pyi +2 -2
  136. metaflow-stubs/runner/deployer.pyi +3 -3
  137. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  138. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  139. metaflow-stubs/runner/nbrun.pyi +2 -2
  140. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  141. metaflow-stubs/runner/utils.pyi +2 -2
  142. metaflow-stubs/system/__init__.pyi +3 -3
  143. metaflow-stubs/system/system_logger.pyi +3 -3
  144. metaflow-stubs/system/system_monitor.pyi +3 -3
  145. metaflow-stubs/tagging_util.pyi +2 -2
  146. metaflow-stubs/tuple_util.pyi +2 -2
  147. metaflow-stubs/version.pyi +2 -2
  148. {metaflow_stubs-2.12.20.dist-info → metaflow_stubs-2.12.22.dist-info}/METADATA +2 -2
  149. metaflow_stubs-2.12.22.dist-info/RECORD +152 -0
  150. metaflow_stubs-2.12.20.dist-info/RECORD +0 -151
  151. {metaflow_stubs-2.12.20.dist-info → metaflow_stubs-2.12.22.dist-info}/WHEEL +0 -0
  152. {metaflow_stubs-2.12.20.dist-info → metaflow_stubs-2.12.22.dist-info}/top_level.txt +0 -0
@@ -1,32 +1,28 @@
 ##################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.20 #
-# Generated on 2024-09-16T18:11:29.503408 #
+# MF version: 2.12.22 #
+# Generated on 2024-09-20T00:45:49.586219 #
 ##################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import metaflow.parameters
-    import datetime
-    import metaflow.plugins.datatools.s3.s3
     import metaflow.flowspec
-    import typing
-    import metaflow._vendor.click.types
-    import metaflow.metaflow_current
-    import metaflow.events
     import metaflow.runner.metaflow_runner
+    import metaflow.events
     import metaflow.client.core
+    import metaflow.plugins.datatools.s3.s3
+    import metaflow.metaflow_current
+    import datetime
     import metaflow.datastore.inputs
+    import typing
     import io
+    import metaflow._vendor.click.types
+    import metaflow.parameters
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

-CURRENT_DIRECTORY: str
-
-INFO_FILE: str
-
 EXT_PKG: str

 def parallel_imap_unordered(func: typing.Callable[[typing.Any], typing.Any], iterable: typing.Iterable[typing.Any], max_parallel: typing.Optional[int] = None, dir: typing.Optional[str] = None) -> typing.Iterator[typing.Any]:
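Note: the substantive change in the hunk above is the removal of the module-level `CURRENT_DIRECTORY` and `INFO_FILE` annotations; the surrounding import churn is pure reordering. Combined with the new `metaflow-stubs/info_file.pyi` entry in the file list (+16 -0), this suggests the `INFO_FILE` constant moved into a dedicated `info_file` module. A minimal sketch of what that would mean for type-checked user code, assuming the stub layout mirrors the runtime package (the new import path is inferred from this diff, not verified):

    # Hypothetical migration sketch; import paths inferred from the stub diff only.
    try:
        from metaflow.info_file import INFO_FILE  # assumed 2.12.22 layout (new info_file stub)
    except ImportError:
        from metaflow import INFO_FILE            # 2.12.20 layout (annotation removed above)

    print(INFO_FILE)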
@@ -727,37 +723,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     """
     ...

-@typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
 @typing.overload
 def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -836,168 +801,104 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
     ...

 @typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+
+    Parameters
+    ----------
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...

 @typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
     """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+
+    Parameters
+    ----------
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...

 @typing.overload
-def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.

     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...

 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.

     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...

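Note: this hunk likewise reorders generated blocks (`parallel`/`catch` and `batch`/`retry` trade places) without changing their text. The `@retry` docstring shown here spells out the intended composition with `@catch`: retries absorb transient failures first, and `@catch` converts the final failure into a stored artifact so the rest of the flow still runs. A sketch of that combination, following the docstrings (the simulated failure is illustrative):

    # Sketch of the @retry/@catch interplay described in the docstrings above.
    import random
    from metaflow import FlowSpec, catch, retry, step

    class FlakyFlow(FlowSpec):
        @catch(var="failure")                       # after retries are exhausted, store the exception
        @retry(times=3, minutes_between_retries=2)  # transient errors get three more attempts
        @step
        def start(self):
            if random.random() < 0.5:               # stand-in for a transient error
                raise RuntimeError("transient failure")
            self.next(self.end)

        @step
        def end(self):
            if getattr(self, "failure", None):
                print("start failed after all retries:", self.failure)

    if __name__ == "__main__":
        FlakyFlow()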
@@ -1084,102 +985,237 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     ...

 @typing.overload
-def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that the step will success under all circumstances.
+    Specifies the Conda environment for the step.

-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.

     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...

 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies that the step will success under all circumstances.
+    Specifies the Conda environment for the step.

-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.

     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...

 @typing.overload
-def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
-
-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

     Parameters
     ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
-
-
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+        This is only relevant for Fargate compute environments
+    log_driver: str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options: List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example: [`awslogs-group:aws/batch/job`]
     """
     ...

 @typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
     """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
-
-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

     Parameters
     ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+        This is only relevant for Fargate compute environments
+    log_driver: str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options: List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example: [`awslogs-group:aws/batch/job`]
+    """
+    ...
+
+@typing.overload
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies environment variables to be set prior to the execution of a step.

+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+    """
+    Specifies environment variables to be set prior to the execution of a step.

+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.

     """
     ...

@@ -1241,59 +1277,53 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     ...

 @typing.overload
-def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the Conda environment for the step.
+    Creates a human-readable report, a Metaflow Card, after this step completes.

-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    Note that you may add multiple `@card` decorators in a step with different parameters.

     Parameters
     ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
+
+
     """
     ...

 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
     """
-    Specifies the Conda environment for the step.
+    Creates a human-readable report, a Metaflow Card, after this step completes.

-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    Note that you may add multiple `@card` decorators in a step with different parameters.

     Parameters
     ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
+
+
     """
     ...

@@ -1364,55 +1394,21 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
     ...

 @typing.overload
-def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...

 @typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
     """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...

@@ -1520,90 +1516,174 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
      ...

  @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
      """
-     Specifies the Conda environment for all steps of the flow.
+     Specifies the PyPI packages for all steps of the flow.

-     Use `@conda_base` to set common libraries required by all
-     steps and use `@conda` to specify step-specific additions.
+     Use `@pypi_base` to set common packages required by all
+     steps and use `@pypi` to specify step-specific overrides.
+
+     Parameters
+     ----------
+     packages : Dict[str, str], default: {}
+         Packages to use for this flow. The key is the name of the package
+         and the value is the version to use.
+     python : str, optional, default: None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     """
+     ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+     ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+     """
+     Specifies the PyPI packages for all steps of the flow.

+     Use `@pypi_base` to set common packages required by all
+     steps and use `@pypi` to specify step-specific overrides.
+
      Parameters
      ----------
-     packages : Dict[str, str], default {}
+     packages : Dict[str, str], default: {}
          Packages to use for this flow. The key is the name of the package
          and the value is the version to use.
-     libraries : Dict[str, str], default {}
-         Supported for backward compatibility. When used with packages, packages will take precedence.
-     python : str, optional, default None
+     python : str, optional, default: None
          Version of Python to use, e.g. '3.7.4'. A default value of None implies
          that the version used will correspond to the version of the Python interpreter used to start the run.
-     disabled : bool, default False
-         If set to True, disables Conda.
+     """
+     ...
+
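As a usage sketch for the `@pypi_base` flow decorator added above (the package pin, Python version, and flow name are illustrative, not taken from this diff):

    from metaflow import FlowSpec, pypi_base, step

    @pypi_base(packages={"pandas": "2.1.0"}, python="3.10.11")
    class PypiDemoFlow(FlowSpec):

        @step
        def start(self):
            import pandas as pd  # resolved from the @pypi_base environment
            self.shape = pd.DataFrame({"x": [1, 2, 3]}).shape
            self.next(self.end)

        @step
        def end(self):
            print(self.shape)

    if __name__ == "__main__":
        PypiDemoFlow()

Such a flow is typically run with the PyPI environment enabled, e.g. `python flow.py --environment=pypi run`.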
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+     This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+
+     Parameters
+     ----------
+     timeout : int
+         Time, in seconds, before the task times out and fails. (Default: 3600)
+     poke_interval : int
+         Time, in seconds, that the job should wait in between each try. (Default: 60)
+     mode : str
+         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+     exponential_backoff : bool
+         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+     pool : str
+         The slot pool this task should run in;
+         slot pools are a way to limit concurrency for certain tasks. (Default: None)
+     soft_fail : bool
+         Set to True to mark the task as SKIPPED on failure. (Default: False)
+     name : str
+         Name of the sensor on Airflow.
+     description : str
+         Description of the sensor in the Airflow UI.
+     external_dag_id : str
+         The dag_id that contains the task you want to wait for.
+     external_task_ids : List[str]
+         The list of task_ids that you want to wait for.
+         If None (the default value), the sensor waits for the DAG. (Default: None)
+     allowed_states : List[str]
+         Iterable of allowed states. (Default: ['success'])
+     failed_states : List[str]
+         Iterable of failed or disallowed states. (Default: None)
+     execution_delta : datetime.timedelta
+         Time difference with the previous execution to look at;
+         the default is the same logical date as the current task or DAG. (Default: None)
+     check_existence : bool
+         Set to True to check whether the external task exists or whether
+         the DAG to wait for exists. (Default: True)
      """
      ...

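A hedged sketch of how this flow-level sensor might be applied (the sensor name and upstream DAG id are hypothetical; remaining parameters are assumed to fall back to the defaults listed in the docstring above):

    from metaflow import FlowSpec, airflow_external_task_sensor, step

    @airflow_external_task_sensor(
        name="wait_for_upstream",        # hypothetical sensor name
        external_dag_id="upstream_dag",  # hypothetical upstream DAG id
    )
    class ExternalSensorDemoFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        ExternalSensorDemoFlow()

Per the docstring, the sensor only takes effect when the flow is compiled with `airflow create`.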
  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-     ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
      """
-     Specifies the Conda environment for all steps of the flow.
-
-     Use `@conda_base` to set common libraries required by all
-     steps and use `@conda` to specify step-specific additions.
+     Specifies the times when the flow should be run when running on a
+     production scheduler.

      Parameters
      ----------
-     packages : Dict[str, str], default {}
-         Packages to use for this flow. The key is the name of the package
-         and the value is the version to use.
-     libraries : Dict[str, str], default {}
-         Supported for backward compatibility. When used with packages, packages will take precedence.
-     python : str, optional, default None
-         Version of Python to use, e.g. '3.7.4'. A default value of None implies
-         that the version used will correspond to the version of the Python interpreter used to start the run.
-     disabled : bool, default False
-         If set to True, disables Conda.
+     hourly : bool, default False
+         Run the workflow hourly.
+     daily : bool, default True
+         Run the workflow daily.
+     weekly : bool, default False
+         Run the workflow weekly.
+     cron : str, optional, default None
+         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+         specified by this expression.
+     timezone : str, optional, default None
+         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+         which accept timezones in [IANA format](https://nodatime.org/TimeZones).
      """
      ...

  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+     ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
      """
-     Specifies the PyPI packages for all steps of the flow.
+     Specifies the times when the flow should be run when running on a
+     production scheduler.

-     Use `@pypi_base` to set common packages required by all
-     steps and use `@pypi` to specify step-specific overrides.
      Parameters
      ----------
-     packages : Dict[str, str], default: {}
-         Packages to use for this flow. The key is the name of the package
-         and the value is the version to use.
-     python : str, optional, default: None
-         Version of Python to use, e.g. '3.7.4'. A default value of None implies
-         that the version used will correspond to the version of the Python interpreter used to start the run.
+     hourly : bool, default False
+         Run the workflow hourly.
+     daily : bool, default True
+         Run the workflow daily.
+     weekly : bool, default False
+         Run the workflow weekly.
+     cron : str, optional, default None
+         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+         specified by this expression.
+     timezone : str, optional, default None
+         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+         which accept timezones in [IANA format](https://nodatime.org/TimeZones).
      """
      ...
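A minimal sketch of the `@schedule` decorator described above (the cron expression and timezone are illustrative; the accepted cron syntax depends on the target scheduler, and the timezone only applies on Argo per the docstring):

    from metaflow import FlowSpec, schedule, step

    @schedule(cron="0 6 * * *", timezone="America/Los_Angeles")
    class ScheduleDemoFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        ScheduleDemoFlow()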
 
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-     ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
      """
-     Specifies the PyPI packages for all steps of the flow.
+     The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+     before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+     and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+     added as a flow decorator. Adding more than one decorator will ensure that the `start` step
+     starts only after all sensors finish.

-     Use `@pypi_base` to set common packages required by all
-     steps and use `@pypi` to specify step-specific overrides.
      Parameters
      ----------
-     packages : Dict[str, str], default: {}
-         Packages to use for this flow. The key is the name of the package
-         and the value is the version to use.
-     python : str, optional, default: None
-         Version of Python to use, e.g. '3.7.4'. A default value of None implies
-         that the version used will correspond to the version of the Python interpreter used to start the run.
+     timeout : int
+         Time, in seconds, before the task times out and fails. (Default: 3600)
+     poke_interval : int
+         Time, in seconds, that the job should wait in between each try. (Default: 60)
+     mode : str
+         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+     exponential_backoff : bool
+         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+     pool : str
+         The slot pool this task should run in;
+         slot pools are a way to limit concurrency for certain tasks. (Default: None)
+     soft_fail : bool
+         Set to True to mark the task as SKIPPED on failure. (Default: False)
+     name : str
+         Name of the sensor on Airflow.
+     description : str
+         Description of the sensor in the Airflow UI.
+     bucket_key : Union[str, List[str]]
+         The key(s) being waited on. Supports a full s3:// style URL or a path relative to the root level.
+         When specified as a full s3:// URL, please leave `bucket_name` as None.
+     bucket_name : str
+         Name of the S3 bucket. Only needed when `bucket_key` is not provided as a full s3:// URL.
+         When specified, all the keys passed to `bucket_key` refer to this bucket. (Default: None)
+     wildcard_match : bool
+         Whether the `bucket_key` should be interpreted as a Unix wildcard pattern. (Default: False)
+     aws_conn_id : str
+         A reference to the S3 connection on Airflow. (Default: None)
+     verify : bool
+         Whether or not to verify SSL certificates for the S3 connection. (Default: None)
      """
      ...
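A sketch of the S3 key sensor in use (the bucket, key, and sensor name are hypothetical; per the docstring, `bucket_name` stays unset when a full s3:// URL is given, and unlisted parameters are assumed to use the documented defaults):

    from metaflow import FlowSpec, airflow_s3_key_sensor, step

    @airflow_s3_key_sensor(
        name="wait_for_input",                       # hypothetical sensor name
        bucket_key="s3://example-bucket/input.csv",  # hypothetical full s3:// URL
    )
    class S3SensorDemoFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        S3SensorDemoFlow()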
 
@@ -1720,136 +1800,52 @@ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typ
      """
      ...
 
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-     This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
-
-     Parameters
-     ----------
-     timeout : int
-         Time, in seconds, before the task times out and fails. (Default: 3600)
-     poke_interval : int
-         Time, in seconds, that the job should wait in between each try. (Default: 60)
-     mode : str
-         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-     exponential_backoff : bool
-         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
-     pool : str
-         The slot pool this task should run in;
-         slot pools are a way to limit concurrency for certain tasks. (Default: None)
-     soft_fail : bool
-         Set to True to mark the task as SKIPPED on failure. (Default: False)
-     name : str
-         Name of the sensor on Airflow.
-     description : str
-         Description of the sensor in the Airflow UI.
-     external_dag_id : str
-         The dag_id that contains the task you want to wait for.
-     external_task_ids : List[str]
-         The list of task_ids that you want to wait for.
-         If None (the default value), the sensor waits for the DAG. (Default: None)
-     allowed_states : List[str]
-         Iterable of allowed states. (Default: ['success'])
-     failed_states : List[str]
-         Iterable of failed or disallowed states. (Default: None)
-     execution_delta : datetime.timedelta
-         Time difference with the previous execution to look at;
-         the default is the same logical date as the current task or DAG. (Default: None)
-     check_existence : bool
-         Set to True to check whether the external task exists or whether
-         the DAG to wait for exists. (Default: True)
-     """
-     ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-     before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-     and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-     added as a flow decorator. Adding more than one decorator will ensure that the `start` step
-     starts only after all sensors finish.
-
-     Parameters
-     ----------
-     timeout : int
-         Time, in seconds, before the task times out and fails. (Default: 3600)
-     poke_interval : int
-         Time, in seconds, that the job should wait in between each try. (Default: 60)
-     mode : str
-         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-     exponential_backoff : bool
-         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
-     pool : str
-         The slot pool this task should run in;
-         slot pools are a way to limit concurrency for certain tasks. (Default: None)
-     soft_fail : bool
-         Set to True to mark the task as SKIPPED on failure. (Default: False)
-     name : str
-         Name of the sensor on Airflow.
-     description : str
-         Description of the sensor in the Airflow UI.
-     bucket_key : Union[str, List[str]]
-         The key(s) being waited on. Supports a full s3:// style URL or a path relative to the root level.
-         When specified as a full s3:// URL, please leave `bucket_name` as None.
-     bucket_name : str
-         Name of the S3 bucket. Only needed when `bucket_key` is not provided as a full s3:// URL.
-         When specified, all the keys passed to `bucket_key` refer to this bucket. (Default: None)
-     wildcard_match : bool
-         Whether the `bucket_key` should be interpreted as a Unix wildcard pattern. (Default: False)
-     aws_conn_id : str
-         A reference to the S3 connection on Airflow. (Default: None)
-     verify : bool
-         Whether or not to verify SSL certificates for the S3 connection. (Default: None)
-     """
-     ...
-
  @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
      """
-     Specifies the times when the flow should be run when running on a
-     production scheduler.
+     Specifies the Conda environment for all steps of the flow.
+
+     Use `@conda_base` to set common libraries required by all
+     steps and use `@conda` to specify step-specific additions.

      Parameters
      ----------
-     hourly : bool, default False
-         Run the workflow hourly.
-     daily : bool, default True
-         Run the workflow daily.
-     weekly : bool, default False
-         Run the workflow weekly.
-     cron : str, optional, default None
-         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-         specified by this expression.
-     timezone : str, optional, default None
-         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-         which accept timezones in [IANA format](https://nodatime.org/TimeZones).
+     packages : Dict[str, str], default {}
+         Packages to use for this flow. The key is the name of the package
+         and the value is the version to use.
+     libraries : Dict[str, str], default {}
+         Supported for backward compatibility. When used with packages, packages will take precedence.
+     python : str, optional, default None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     disabled : bool, default False
+         If set to True, disables Conda.
      """
      ...

  @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
      ...

- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
      """
-     Specifies the times when the flow should be run when running on a
-     production scheduler.
+     Specifies the Conda environment for all steps of the flow.
+
+     Use `@conda_base` to set common libraries required by all
+     steps and use `@conda` to specify step-specific additions.

      Parameters
      ----------
-     hourly : bool, default False
-         Run the workflow hourly.
-     daily : bool, default True
-         Run the workflow daily.
-     weekly : bool, default False
-         Run the workflow weekly.
-     cron : str, optional, default None
-         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-         specified by this expression.
-     timezone : str, optional, default None
-         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-         which accept timezones in [IANA format](https://nodatime.org/TimeZones).
+     packages : Dict[str, str], default {}
+         Packages to use for this flow. The key is the name of the package
+         and the value is the version to use.
+     libraries : Dict[str, str], default {}
+         Supported for backward compatibility. When used with packages, packages will take precedence.
+     python : str, optional, default None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     disabled : bool, default False
+         If set to True, disables Conda.
      """
      ...
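Finally, a usage sketch for the `@conda_base` flow decorator re-added above (the package pin and Python version are illustrative); such a flow is typically run with the Conda environment enabled, e.g. `python flow.py --environment=conda run`:

    from metaflow import FlowSpec, conda_base, step

    @conda_base(packages={"numpy": "1.26.4"}, python="3.10.11")
    class CondaDemoFlow(FlowSpec):

        @step
        def start(self):
            import numpy as np  # resolved from the @conda_base environment
            self.total = float(np.arange(10).sum())
            self.next(self.end)

        @step
        def end(self):
            print(self.total)

    if __name__ == "__main__":
        CondaDemoFlow()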