ob-metaflow-stubs 2.11.9.1__py2.py3-none-any.whl → 2.11.10.3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (133)
  1. metaflow-stubs/__init__.pyi +622 -622
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +2 -2
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +17 -17
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +4 -4
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  60. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  61. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  62. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  63. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  64. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  68. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  70. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  77. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  80. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  81. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  82. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  83. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  84. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  86. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  88. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  90. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  91. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  92. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  93. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  94. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  95. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  96. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  98. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  104. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +4 -4
  105. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  107. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  108. metaflow-stubs/plugins/package_cli.pyi +2 -2
  109. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  111. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  114. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  117. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  119. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  120. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  121. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  123. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  124. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  126. metaflow-stubs/procpoll.pyi +2 -2
  127. metaflow-stubs/pylint_wrapper.pyi +2 -2
  128. metaflow-stubs/tagging_util.pyi +2 -2
  129. {ob_metaflow_stubs-2.11.9.1.dist-info → ob_metaflow_stubs-2.11.10.3.dist-info}/METADATA +2 -2
  130. ob_metaflow_stubs-2.11.10.3.dist-info/RECORD +133 -0
  131. ob_metaflow_stubs-2.11.9.1.dist-info/RECORD +0 -133
  132. {ob_metaflow_stubs-2.11.9.1.dist-info → ob_metaflow_stubs-2.11.10.3.dist-info}/WHEEL +0 -0
  133. {ob_metaflow_stubs-2.11.9.1.dist-info → ob_metaflow_stubs-2.11.10.3.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
@@ -1,23 +1,23 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.11.9.1 #
- # Generated on 2024-03-30T09:14:00.953272 #
+ # MF version: 2.11.10.3 #
+ # Generated on 2024-04-12T18:07:07.339961 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow.parameters
+ import io
+ import metaflow.metaflow_current
+ import metaflow._vendor.click.types
+ import typing
  import metaflow.datastore.inputs
  import metaflow.client.core
- import metaflow.events
+ import metaflow.parameters
  import metaflow.plugins.datatools.s3.s3
- import io
+ import metaflow.events
  import datetime
- import typing
- import metaflow._vendor.click.types
- import metaflow.metaflow_current
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

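Aside from the version banner, the only edits in this first hunk are a reshuffle of imports guarded by `if typing.TYPE_CHECKING:`. A minimal sketch of that guard pattern, showing why such a reordering is behavior-neutral (the `timestamp_label` function is illustrative, not part of the stubs):

```python
# Sketch of the TYPE_CHECKING pattern used throughout these stubs: guarded
# imports are evaluated only by static type checkers (mypy, pyright), never
# at runtime, so reordering them changes nothing when the module is loaded.
from __future__ import annotations

import typing

if typing.TYPE_CHECKING:
    import datetime  # visible to the type checker only


def timestamp_label(ts: "datetime.datetime") -> str:
    # The annotation is a string, so `datetime` need not be imported at
    # runtime; the method call works on whatever datetime object arrives.
    return ts.isoformat()
```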
@@ -726,132 +726,51 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Specifies that the step will success under all circumstances.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Specifies that the step will success under all circumstances.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

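The hunk above swaps `@catch` into the position `@retry` used to occupy; both decorators still exist in the file, only their order changed (`@retry` reappears in a later hunk). As a reading aid, a minimal sketch of the `@catch` signature documented above; the flow, step, and artifact names are hypothetical:

```python
# Hypothetical flow exercising @catch(var=..., print_exception=...) as
# documented in the stub: the caught exception is stored as an artifact
# instead of failing the run.
from metaflow import FlowSpec, catch, step


class CatchDemoFlow(FlowSpec):

    @catch(var="compute_failed", print_exception=True)
    @step
    def start(self):
        self.result = 1 // 0  # deliberately raises ZeroDivisionError
        self.next(self.end)

    @step
    def end(self):
        # Detect the error path via the optional artifact, as the docstring
        # describes; the happy-path artifact (self.result) is missing here.
        if getattr(self, "compute_failed", None):
            print("start failed:", self.compute_failed)


if __name__ == "__main__":
    CatchDemoFlow()
```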
@@ -912,184 +831,96 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies that this step should execute on Kubernetes.

  Parameters
  ----------
  cpu : int, default 1
  Number of CPUs required for this step. If `@resources` is
  also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
  memory : int, default 4096
  Memory size (in MB) required for this step. If
  `@resources` is also present, the maximum value from all decorators is
  used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
  image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
  not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
  use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
+ This enables an explicit tmpfs mount for this step.
  tmpfs_tempdir : bool, default True
  sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
+ tmpfs_size : int, optional, default: None
  The value for the size (in MiB) of the tmpfs mount for this step.
  This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
  memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage: int, default None
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example usage: ["awslogs-group:aws/batch/job"]
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
+ """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage: int, default None
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example usage: ["awslogs-group:aws/batch/job"]
- """
- ...
-
- @typing.overload
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
- """
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
  Creates a human-readable report, a Metaflow Card, after this step completes.

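After the reorder, this block carries the `@kubernetes` and `@card` stubs. A short sketch combining the two as the signatures above allow, assuming a Kubernetes-enabled Metaflow deployment; the flow name and resource values are illustrative:

```python
# Hypothetical step that requests Kubernetes resources and publishes a
# default card when it completes.
from metaflow import FlowSpec, card, kubernetes, step


class K8sCardFlow(FlowSpec):

    @kubernetes(cpu=2, memory=8192, disk=10240)
    @card(type="default", timeout=45)
    @step
    def start(self):
        self.summary = {"rows": 1000}  # artifact rendered into the card
        self.next(self.end)

    @step
    def end(self):
        print(self.summary)


if __name__ == "__main__":
    K8sCardFlow()
```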
@@ -1110,55 +941,6 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...

- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
  @typing.overload
  def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
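The hunk closes at the unchanged `@secrets` stub. A minimal sketch of its `sources` parameter; the source name `db-credentials` and the `DB_USER` variable are placeholders whose real values depend on the secrets backend configured for your deployment:

```python
# Hypothetical use of @secrets(sources=[...]): resolved secrets are exposed
# to the step as environment variables.
import os

from metaflow import FlowSpec, secrets, step


class SecretsDemoFlow(FlowSpec):

    @secrets(sources=["db-credentials"])  # placeholder source name
    @step
    def start(self):
        print("user:", os.environ.get("DB_USER"))  # placeholder variable
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsDemoFlow()
```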
@@ -1192,65 +974,6 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, port: typing.Optional[int] = None, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- port: int, optional
- Number of the port to specify in the Kubernetes job object
- shared_memory: int, optional
- Shared memory size (in MiB) required for this steps
- """
- ...
-
  @typing.overload
  def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
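This hunk ends at the unchanged `@pypi` stub. A small sketch of the `packages` parameter shown in its signature (the pin is arbitrary); flows that use `@pypi` are typically launched with `--environment=pypi`:

```python
# Hypothetical step that runs inside an isolated environment containing
# exactly the pinned packages.
from metaflow import FlowSpec, pypi, step


class PypiDemoFlow(FlowSpec):

    @pypi(packages={"requests": "2.31.0"})  # illustrative pin
    @step
    def start(self):
        import requests  # available inside the @pypi environment

        print(requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiDemoFlow()
```

For example: `python pypi_demo.py --environment=pypi run`.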
@@ -1301,52 +1024,182 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the Conda environment for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
  libraries : Dict[str, str], default {}
  Supported for backward compatibility. When used with packages, packages will take precedence.
  python : str, optional, default None
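`@retry`, `@resources`, and `@conda` reappear in this block after the reorder. A brief sketch stacking `@retry` with `@resources`, in line with the docstrings above, which note that `@resources` requests are honored by whichever compute layer is chosen at run time (values are illustrative):

```python
# Hypothetical step with retries for transient failures and a resource
# request that applies under e.g. `run --with kubernetes` or `run --with batch`.
from metaflow import FlowSpec, resources, retry, step


class RobustFlow(FlowSpec):

    @retry(times=3, minutes_between_retries=2)
    @resources(cpu=4, memory=16384)
    @step
    def start(self):
        self.data = list(range(10))
        self.next(self.end)

    @step
    def end(self):
        print(len(self.data))


if __name__ == "__main__":
    RobustFlow()
```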
@@ -1388,10 +1241,160 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
1388
1241
  """
1389
1242
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that the AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments.
+ tmpfs_tempdir : bool, default True
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to the container.
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200).
+ This is only relevant for Fargate compute environments.
+ log_driver : str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options : List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log_driver` chosen. Validation of these options is not supported yet.
+ Example usage: ["awslogs-group:aws/batch/job"]
+ """
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that the AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments.
+ tmpfs_tempdir : bool, default True
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to the container.
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200).
+ This is only relevant for Fargate compute environments.
+ log_driver : str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options : List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log_driver` chosen. Validation of these options is not supported yet.
+ Example usage: ["awslogs-group:aws/batch/job"]
+ """
+ ...
+
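As an editorial usage sketch (not part of the released stubs): the `@batch` step decorator documented above could be applied as follows, assuming the top-level exports this stub file declares. The flow name, queue, and resource sizes are placeholders.

```
from metaflow import FlowSpec, batch, step

class BatchDemoFlow(FlowSpec):

    # Placeholder queue name and sizes; real defaults come from the
    # METAFLOW_* configuration values named in the docstring above.
    @batch(cpu=2, memory=8192, queue="my-batch-queue")
    @step
    def start(self):
        # Runs inside an AWS Batch container; if @resources were also
        # present, the maximum of the two requests would win.
        print("hello from AWS Batch")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    BatchDemoFlow()
```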
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.

  Parameters
  ----------
@@ -1412,120 +1415,162 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  Name of the sensor on Airflow
  description : str
  Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
+ When it is specified as a full s3:// URL, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
  """
  ...
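A hedged sketch of attaching the flow-level sensor above; every argument value (bucket URL, pool, sensor name, intervals) is illustrative, and the decorator only takes effect when the flow is compiled with `airflow create`.

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="reschedule",
    exponential_backoff=True,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_input_file",
    description="Block start until the daily input file lands",
    bucket_key="s3://my-bucket/daily/input.csv",  # full s3:// URL, so...
    bucket_name=None,                             # ...bucket_name stays None
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class S3SensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3SensorFlow()
```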

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies what flows belong to the same project.
+ Specifies the Conda environment for all steps of the flow.

- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.

  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...

  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
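A minimal sketch combining the flow-level `@conda_base` above with a step-level `@conda` addition, as the docstrings describe; the Python version and package pins are illustrative only.

```
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python="3.9.16", packages={"pandas": "1.5.3"})  # illustrative pins
class CondaDemoFlow(FlowSpec):

    @conda(packages={"scikit-learn": "1.2.2"})  # step-specific addition
    @step
    def start(self):
        import pandas  # resolved from the flow-level Conda environment
        print(pandas.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaDemoFlow()
```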

  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
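A hedged sketch of `@schedule` as documented above; the cron expression and timezone are placeholders, and cron syntax is scheduler-specific (the six-field form below follows the EventBridge link in the docstring).

```
from metaflow import FlowSpec, schedule, step

# Illustrative: run daily at 06:00 in the given IANA timezone once the flow
# is deployed to a production scheduler; this has no effect on local runs.
@schedule(cron="0 6 * * ? *", timezone="America/New_York")
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```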

  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.

@@ -1534,47 +1579,51 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
  ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.

@@ -1583,110 +1632,48 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  ...
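A short sketch of the `@trigger_on_finish` pattern documented above; `FooFlow` is a placeholder upstream flow name.

```
from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow="FooFlow")  # placeholder upstream flow
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # On a production scheduler, this flow starts after a successful
        # FooFlow run in the same (project) namespace.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```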

  @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
  """
  ...

  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
  """
  ...
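A minimal sketch pairing the flow-level `@pypi_base` above with a step-level `@pypi` addition; the Python version and package pins are illustrative only.

```
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(python="3.10.9", packages={"requests": "2.31.0"})  # illustrative pins
class PypiDemoFlow(FlowSpec):

    @pypi(packages={"tabulate": "0.9.0"})  # step-specific addition
    @step
    def start(self):
        import requests  # installed from PyPI into this step's environment
        print(requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiDemoFlow()
```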

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.

  Parameters
  ----------
@@ -1707,64 +1694,81 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  Name of the sensor on Airflow
  description : str
  Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
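A hedged sketch of the external-task sensor above; the DAG id, task ids, and timings are placeholders, and the decorator only takes effect when the flow is compiled with `airflow create`.

```
import datetime

from metaflow import FlowSpec, airflow_external_task_sensor, step

@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode="reschedule",
    exponential_backoff=True,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_upstream_dag",
    description="Block start until the upstream task succeeds",
    external_dag_id="upstream_etl",     # placeholder DAG id
    external_task_ids=["load_table"],   # None would wait on the whole DAG
    allowed_states=["success"],
    failed_states=None,
    execution_delta=datetime.timedelta(hours=1),
    check_existence=True,
)
class ExternalSensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ExternalSensorFlow()
```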
+
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
  """
  ...
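A one-line sketch of `@project`; the name below is illustrative and, per the docstring, must be unique per production scheduler and contain only lowercase alphanumerics and underscores.

```
from metaflow import FlowSpec, project, step

@project(name="fraud_detection")  # illustrative project name
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScoringFlow()
```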

  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.

@@ -1773,51 +1777,47 @@ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] =
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
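Closing with a hedged sketch of the event-based `@trigger` decorator shown in this hunk; the event name and parameter mapping are placeholders mirroring the docstring's own examples.

```
from metaflow import FlowSpec, Parameter, step, trigger

# Placeholder event name; 'path' is filled from the event's 's3_key' field.
@trigger(event={"name": "data_ready", "parameters": {"path": "s3_key"}})
class EventDrivenFlow(FlowSpec):

    path = Parameter("path", default="")

    @step
    def start(self):
        print("triggered with path:", self.path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EventDrivenFlow()
```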