ob-metaflow-stubs 4.6__py2.py3-none-any.whl → 4.7__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (145)
  1. metaflow-stubs/__init__.pyi +495 -495
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +6 -6
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +2 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +4 -4
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +4 -4
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +4 -4
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +4 -4
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  60. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  61. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  62. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  63. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  65. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  78. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  82. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  83. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  84. metaflow-stubs/plugins/catch_decorator.pyi +4 -4
  85. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  86. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  89. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  91. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  92. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  93. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  95. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
  97. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  99. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  105. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  106. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  108. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +3 -3
  109. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  110. metaflow-stubs/plugins/package_cli.pyi +2 -2
  111. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/perimeters.pyi +2 -2
  113. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  114. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  117. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  120. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  123. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  124. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  126. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  127. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  129. metaflow-stubs/procpoll.pyi +2 -2
  130. metaflow-stubs/profilers/__init__.pyi +2 -2
  131. metaflow-stubs/pylint_wrapper.pyi +2 -2
  132. metaflow-stubs/runner/__init__.pyi +2 -2
  133. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  134. metaflow-stubs/runner/nbrun.pyi +2 -2
  135. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  136. metaflow-stubs/system/__init__.pyi +3 -3
  137. metaflow-stubs/system/system_logger.pyi +2 -2
  138. metaflow-stubs/system/system_monitor.pyi +3 -3
  139. metaflow-stubs/tagging_util.pyi +2 -2
  140. metaflow-stubs/tuple_util.pyi +2 -2
  141. {ob_metaflow_stubs-4.6.dist-info → ob_metaflow_stubs-4.7.dist-info}/METADATA +1 -1
  142. ob_metaflow_stubs-4.7.dist-info/RECORD +145 -0
  143. ob_metaflow_stubs-4.6.dist-info/RECORD +0 -145
  144. {ob_metaflow_stubs-4.6.dist-info → ob_metaflow_stubs-4.7.dist-info}/WHEEL +0 -0
  145. {ob_metaflow_stubs-4.6.dist-info → ob_metaflow_stubs-4.7.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.7.1+ob(v1) #
- # Generated on 2024-07-12T16:22:02.506423 #
+ # MF version: 2.12.7.2+ob(v1) #
+ # Generated on 2024-07-16T17:10:32.274678 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow.metaflow_current
- import metaflow.flowspec
  import typing
- import metaflow.client.core
+ import datetime
+ import metaflow.plugins.datatools.s3.s3
+ import metaflow.parameters
+ import metaflow.datastore.inputs
  import metaflow.runner.metaflow_runner
  import io
- import metaflow.parameters
- import datetime
+ import metaflow.metaflow_current
+ import metaflow.flowspec
  import metaflow._vendor.click.types
- import metaflow.plugins.datatools.s3.s3
+ import metaflow.client.core
  import metaflow.events
- import metaflow.datastore.inputs
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -727,139 +727,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
727
727
  """
728
728
  ...
729
729
 
730
- @typing.overload
731
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
732
- """
733
- Specifies secrets to be retrieved and injected as environment variables prior to
734
- the execution of a step.
735
-
736
- Parameters
737
- ----------
738
- sources : List[Union[str, Dict[str, Any]]], default: []
739
- List of secret specs, defining how the secrets are to be retrieved
740
- """
741
- ...
742
-
743
- @typing.overload
744
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
745
- ...
746
-
747
- @typing.overload
748
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
749
- ...
750
-
751
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
752
- """
753
- Specifies secrets to be retrieved and injected as environment variables prior to
754
- the execution of a step.
755
-
756
- Parameters
757
- ----------
758
- sources : List[Union[str, Dict[str, Any]]], default: []
759
- List of secret specs, defining how the secrets are to be retrieved
760
- """
761
- ...
762
-
763
- @typing.overload
764
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
765
- """
766
- Specifies that the step will success under all circumstances.
767
-
768
- The decorator will create an optional artifact, specified by `var`, which
769
- contains the exception raised. You can use it to detect the presence
770
- of errors, indicating that all happy-path artifacts produced by the step
771
- are missing.
772
-
773
- Parameters
774
- ----------
775
- var : str, optional, default None
776
- Name of the artifact in which to store the caught exception.
777
- If not specified, the exception is not stored.
778
- print_exception : bool, default True
779
- Determines whether or not the exception is printed to
780
- stdout when caught.
781
- """
782
- ...
783
-
784
- @typing.overload
785
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
786
- ...
787
-
788
- @typing.overload
789
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
790
- ...
791
-
792
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
793
- """
794
- Specifies that the step will success under all circumstances.
795
-
796
- The decorator will create an optional artifact, specified by `var`, which
797
- contains the exception raised. You can use it to detect the presence
798
- of errors, indicating that all happy-path artifacts produced by the step
799
- are missing.
800
-
801
- Parameters
802
- ----------
803
- var : str, optional, default None
804
- Name of the artifact in which to store the caught exception.
805
- If not specified, the exception is not stored.
806
- print_exception : bool, default True
807
- Determines whether or not the exception is printed to
808
- stdout when caught.
809
- """
810
- ...
811
-
812
- @typing.overload
813
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
814
- """
815
- Creates a human-readable report, a Metaflow Card, after this step completes.
816
-
817
- Note that you may add multiple `@card` decorators in a step with different parameters.
818
-
819
- Parameters
820
- ----------
821
- type : str, default 'default'
822
- Card type.
823
- id : str, optional, default None
824
- If multiple cards are present, use this id to identify this card.
825
- options : Dict[str, Any], default {}
826
- Options passed to the card. The contents depend on the card type.
827
- timeout : int, default 45
828
- Interrupt reporting if it takes more than this many seconds.
829
-
830
-
831
- """
832
- ...
833
-
834
- @typing.overload
835
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
836
- ...
837
-
838
- @typing.overload
839
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
840
- ...
841
-
842
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
843
- """
844
- Creates a human-readable report, a Metaflow Card, after this step completes.
845
-
846
- Note that you may add multiple `@card` decorators in a step with different parameters.
847
-
848
- Parameters
849
- ----------
850
- type : str, default 'default'
851
- Card type.
852
- id : str, optional, default None
853
- If multiple cards are present, use this id to identify this card.
854
- options : Dict[str, Any], default {}
855
- Options passed to the card. The contents depend on the card type.
856
- timeout : int, default 45
857
- Interrupt reporting if it takes more than this many seconds.
858
-
859
-
860
- """
861
- ...
862
-
863
730
  @typing.overload
864
731
  def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
865
732
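The stub docstrings moved in this hunk describe, among others, the `@catch` and `@card` step decorators. For orientation only, a minimal usage sketch, not part of this package, with an illustrative flow name and artifact name, assuming a standard Metaflow installation:

```python
from metaflow import FlowSpec, step, catch, card

class CatchCardFlow(FlowSpec):

    @catch(var="compute_error", print_exception=True)  # the raised exception is stored in the `compute_error` artifact
    @card(type="default", timeout=45)                   # render a default card after this step completes
    @step
    def start(self):
        self.result = 1 / 0  # fails on purpose; @catch lets the run continue
        self.next(self.end)

    @step
    def end(self):
        # the happy-path artifact `result` is missing when `compute_error` is set
        print("caught:", getattr(self, "compute_error", None))

if __name__ == "__main__":
    CatchCardFlow()
```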
  """
@@ -914,33 +781,79 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
914
781
  ...
915
782
 
916
783
  @typing.overload
917
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
784
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
918
785
  """
919
- Specifies environment variables to be set prior to the execution of a step.
786
+ Specifies the resources needed when executing this step.
787
+
788
+ Use `@resources` to specify the resource requirements
789
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
790
+
791
+ You can choose the compute layer on the command line by executing e.g.
792
+ ```
793
+ python myflow.py run --with batch
794
+ ```
795
+ or
796
+ ```
797
+ python myflow.py run --with kubernetes
798
+ ```
799
+ which executes the flow on the desired system using the
800
+ requirements specified in `@resources`.
920
801
 
921
802
  Parameters
922
803
  ----------
923
- vars : Dict[str, str], default {}
924
- Dictionary of environment variables to set.
804
+ cpu : int, default 1
805
+ Number of CPUs required for this step.
806
+ gpu : int, default 0
807
+ Number of GPUs required for this step.
808
+ disk : int, optional, default None
809
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
810
+ memory : int, default 4096
811
+ Memory size (in MB) required for this step.
812
+ shared_memory : int, optional, default None
813
+ The value for the size (in MiB) of the /dev/shm volume for this step.
814
+ This parameter maps to the `--shm-size` option in Docker.
925
815
  """
926
816
  ...
927
817
 
928
818
  @typing.overload
929
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
819
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
930
820
  ...
931
821
 
932
822
  @typing.overload
933
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
823
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
934
824
  ...
935
825
 
936
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
826
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
937
827
  """
938
- Specifies environment variables to be set prior to the execution of a step.
828
+ Specifies the resources needed when executing this step.
829
+
830
+ Use `@resources` to specify the resource requirements
831
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
832
+
833
+ You can choose the compute layer on the command line by executing e.g.
834
+ ```
835
+ python myflow.py run --with batch
836
+ ```
837
+ or
838
+ ```
839
+ python myflow.py run --with kubernetes
840
+ ```
841
+ which executes the flow on the desired system using the
842
+ requirements specified in `@resources`.
939
843
 
940
844
  Parameters
941
845
  ----------
942
- vars : Dict[str, str], default {}
943
- Dictionary of environment variables to set.
846
+ cpu : int, default 1
847
+ Number of CPUs required for this step.
848
+ gpu : int, default 0
849
+ Number of GPUs required for this step.
850
+ disk : int, optional, default None
851
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
852
+ memory : int, default 4096
853
+ Memory size (in MB) required for this step.
854
+ shared_memory : int, optional, default None
855
+ The value for the size (in MiB) of the /dev/shm volume for this step.
856
+ This parameter maps to the `--shm-size` option in Docker.
944
857
  """
945
858
  ...
946
859
 
@@ -1092,66 +1005,41 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1092
1005
  ...
1093
1006
 
1094
1007
  @typing.overload
1095
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1008
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1096
1009
  """
1097
- Specifies the Conda environment for the step.
1010
+ Specifies a timeout for your step.
1098
1011
 
1099
- Information in this decorator will augment any
1100
- attributes set in the `@conda_base` flow-level decorator. Hence,
1101
- you can use `@conda_base` to set packages required by all
1102
- steps and use `@conda` to specify step-specific overrides.
1012
+ This decorator is useful if this step may hang indefinitely.
1013
+
1014
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1015
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1016
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1017
+
1018
+ Note that all the values specified in parameters are added together so if you specify
1019
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1103
1020
 
1104
1021
  Parameters
1105
1022
  ----------
1106
- packages : Dict[str, str], default {}
1107
- Packages to use for this step. The key is the name of the package
1108
- and the value is the version to use.
1109
- libraries : Dict[str, str], default {}
1110
- Supported for backward compatibility. When used with packages, packages will take precedence.
1111
- python : str, optional, default None
1112
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1113
- that the version used will correspond to the version of the Python interpreter used to start the run.
1114
- disabled : bool, default False
1115
- If set to True, disables @conda.
1023
+ seconds : int, default 0
1024
+ Number of seconds to wait prior to timing out.
1025
+ minutes : int, default 0
1026
+ Number of minutes to wait prior to timing out.
1027
+ hours : int, default 0
1028
+ Number of hours to wait prior to timing out.
1116
1029
  """
1117
1030
  ...
1118
1031
 
1119
1032
  @typing.overload
1120
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1033
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1121
1034
  ...
1122
1035
 
1123
1036
  @typing.overload
1124
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1037
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1125
1038
  ...
1126
1039
 
1127
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1040
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1128
1041
  """
1129
- Specifies the Conda environment for the step.
1130
-
1131
- Information in this decorator will augment any
1132
- attributes set in the `@conda_base` flow-level decorator. Hence,
1133
- you can use `@conda_base` to set packages required by all
1134
- steps and use `@conda` to specify step-specific overrides.
1135
-
1136
- Parameters
1137
- ----------
1138
- packages : Dict[str, str], default {}
1139
- Packages to use for this step. The key is the name of the package
1140
- and the value is the version to use.
1141
- libraries : Dict[str, str], default {}
1142
- Supported for backward compatibility. When used with packages, packages will take precedence.
1143
- python : str, optional, default None
1144
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1145
- that the version used will correspond to the version of the Python interpreter used to start the run.
1146
- disabled : bool, default False
1147
- If set to True, disables @conda.
1148
- """
1149
- ...
1150
-
1151
- @typing.overload
1152
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1153
- """
1154
- Specifies a timeout for your step.
1042
+ Specifies a timeout for your step.
1155
1043
 
1156
1044
  This decorator is useful if this step may hang indefinitely.
1157
1045
 
@@ -1174,34 +1062,35 @@ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Cal
1174
1062
  ...
1175
1063
 
1176
1064
  @typing.overload
1177
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1065
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1066
+ """
1067
+ Specifies secrets to be retrieved and injected as environment variables prior to
1068
+ the execution of a step.
1069
+
1070
+ Parameters
1071
+ ----------
1072
+ sources : List[Union[str, Dict[str, Any]]], default: []
1073
+ List of secret specs, defining how the secrets are to be retrieved
1074
+ """
1178
1075
  ...
1179
1076
 
1180
1077
  @typing.overload
1181
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1078
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1182
1079
  ...
1183
1080
 
1184
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1081
+ @typing.overload
1082
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1083
+ ...
1084
+
1085
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1185
1086
  """
1186
- Specifies a timeout for your step.
1187
-
1188
- This decorator is useful if this step may hang indefinitely.
1189
-
1190
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1191
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1192
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1193
-
1194
- Note that all the values specified in parameters are added together so if you specify
1195
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1087
+ Specifies secrets to be retrieved and injected as environment variables prior to
1088
+ the execution of a step.
1196
1089
 
1197
1090
  Parameters
1198
1091
  ----------
1199
- seconds : int, default 0
1200
- Number of seconds to wait prior to timing out.
1201
- minutes : int, default 0
1202
- Number of minutes to wait prior to timing out.
1203
- hours : int, default 0
1204
- Number of hours to wait prior to timing out.
1092
+ sources : List[Union[str, Dict[str, Any]]], default: []
1093
+ List of secret specs, defining how the secrets are to be retrieved
1205
1094
  """
1206
1095
  ...
1207
1096
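This hunk moves the `@secrets` docstring into this position. An illustrative sketch of the decorator, where the secret id and the injected environment variable name are placeholders rather than values from this package:

```python
import os

from metaflow import FlowSpec, step, secrets

class SecretsFlow(FlowSpec):

    @secrets(sources=["db-credentials"])  # placeholder secret id; dict specs are also accepted
    @step
    def start(self):
        # retrieved secrets are injected as environment variables before the step body runs
        print("DB_USER present:", "DB_USER" in os.environ)  # DB_USER is a hypothetical key
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsFlow()
```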
 
@@ -1265,300 +1154,285 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
1265
1154
  ...
1266
1155
 
1267
1156
  @typing.overload
1268
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1157
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1269
1158
  """
1270
- Specifies the PyPI packages for the step.
1159
+ Specifies the Conda environment for the step.
1271
1160
 
1272
1161
  Information in this decorator will augment any
1273
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1274
- you can use `@pypi_base` to set packages required by all
1275
- steps and use `@pypi` to specify step-specific overrides.
1162
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1163
+ you can use `@conda_base` to set packages required by all
1164
+ steps and use `@conda` to specify step-specific overrides.
1276
1165
 
1277
1166
  Parameters
1278
1167
  ----------
1279
- packages : Dict[str, str], default: {}
1168
+ packages : Dict[str, str], default {}
1280
1169
  Packages to use for this step. The key is the name of the package
1281
1170
  and the value is the version to use.
1282
- python : str, optional, default: None
1171
+ libraries : Dict[str, str], default {}
1172
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1173
+ python : str, optional, default None
1283
1174
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1284
1175
  that the version used will correspond to the version of the Python interpreter used to start the run.
1176
+ disabled : bool, default False
1177
+ If set to True, disables @conda.
1285
1178
  """
1286
1179
  ...
1287
1180
 
1288
1181
  @typing.overload
1289
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1182
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1290
1183
  ...
1291
1184
 
1292
1185
  @typing.overload
1293
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1186
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1294
1187
  ...
1295
1188
 
1296
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1189
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1297
1190
  """
1298
- Specifies the PyPI packages for the step.
1191
+ Specifies the Conda environment for the step.
1299
1192
 
1300
1193
  Information in this decorator will augment any
1301
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1302
- you can use `@pypi_base` to set packages required by all
1303
- steps and use `@pypi` to specify step-specific overrides.
1194
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1195
+ you can use `@conda_base` to set packages required by all
1196
+ steps and use `@conda` to specify step-specific overrides.
1304
1197
 
1305
1198
  Parameters
1306
1199
  ----------
1307
- packages : Dict[str, str], default: {}
1200
+ packages : Dict[str, str], default {}
1308
1201
  Packages to use for this step. The key is the name of the package
1309
1202
  and the value is the version to use.
1310
- python : str, optional, default: None
1203
+ libraries : Dict[str, str], default {}
1204
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1205
+ python : str, optional, default None
1311
1206
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1312
1207
  that the version used will correspond to the version of the Python interpreter used to start the run.
1208
+ disabled : bool, default False
1209
+ If set to True, disables @conda.
1313
1210
  """
1314
1211
  ...
1315
1212
 
1316
1213
  @typing.overload
1317
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1214
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1318
1215
  """
1319
- Specifies the resources needed when executing this step.
1320
-
1321
- Use `@resources` to specify the resource requirements
1322
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1216
+ Specifies that the step will success under all circumstances.
1323
1217
 
1324
- You can choose the compute layer on the command line by executing e.g.
1325
- ```
1326
- python myflow.py run --with batch
1327
- ```
1328
- or
1329
- ```
1330
- python myflow.py run --with kubernetes
1331
- ```
1332
- which executes the flow on the desired system using the
1333
- requirements specified in `@resources`.
1218
+ The decorator will create an optional artifact, specified by `var`, which
1219
+ contains the exception raised. You can use it to detect the presence
1220
+ of errors, indicating that all happy-path artifacts produced by the step
1221
+ are missing.
1334
1222
 
1335
1223
  Parameters
1336
1224
  ----------
1337
- cpu : int, default 1
1338
- Number of CPUs required for this step.
1339
- gpu : int, default 0
1340
- Number of GPUs required for this step.
1341
- disk : int, optional, default None
1342
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1343
- memory : int, default 4096
1344
- Memory size (in MB) required for this step.
1345
- shared_memory : int, optional, default None
1346
- The value for the size (in MiB) of the /dev/shm volume for this step.
1347
- This parameter maps to the `--shm-size` option in Docker.
1225
+ var : str, optional, default None
1226
+ Name of the artifact in which to store the caught exception.
1227
+ If not specified, the exception is not stored.
1228
+ print_exception : bool, default True
1229
+ Determines whether or not the exception is printed to
1230
+ stdout when caught.
1348
1231
  """
1349
1232
  ...
1350
1233
 
1351
1234
  @typing.overload
1352
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1235
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1353
1236
  ...
1354
1237
 
1355
1238
  @typing.overload
1356
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1239
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1357
1240
  ...
1358
1241
 
1359
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1242
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1360
1243
  """
1361
- Specifies the resources needed when executing this step.
1362
-
1363
- Use `@resources` to specify the resource requirements
1364
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1244
+ Specifies that the step will success under all circumstances.
1365
1245
 
1366
- You can choose the compute layer on the command line by executing e.g.
1367
- ```
1368
- python myflow.py run --with batch
1369
- ```
1370
- or
1371
- ```
1372
- python myflow.py run --with kubernetes
1373
- ```
1374
- which executes the flow on the desired system using the
1375
- requirements specified in `@resources`.
1246
+ The decorator will create an optional artifact, specified by `var`, which
1247
+ contains the exception raised. You can use it to detect the presence
1248
+ of errors, indicating that all happy-path artifacts produced by the step
1249
+ are missing.
1376
1250
 
1377
1251
  Parameters
1378
1252
  ----------
1379
- cpu : int, default 1
1380
- Number of CPUs required for this step.
1381
- gpu : int, default 0
1382
- Number of GPUs required for this step.
1383
- disk : int, optional, default None
1384
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1385
- memory : int, default 4096
1386
- Memory size (in MB) required for this step.
1387
- shared_memory : int, optional, default None
1388
- The value for the size (in MiB) of the /dev/shm volume for this step.
1389
- This parameter maps to the `--shm-size` option in Docker.
1253
+ var : str, optional, default None
1254
+ Name of the artifact in which to store the caught exception.
1255
+ If not specified, the exception is not stored.
1256
+ print_exception : bool, default True
1257
+ Determines whether or not the exception is printed to
1258
+ stdout when caught.
1390
1259
  """
1391
1260
  ...
1392
1261
 
1393
1262
  @typing.overload
1394
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1263
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1395
1264
  """
1396
- Specifies the times when the flow should be run when running on a
1397
- production scheduler.
1265
+ Specifies the PyPI packages for the step.
1266
+
1267
+ Information in this decorator will augment any
1268
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1269
+ you can use `@pypi_base` to set packages required by all
1270
+ steps and use `@pypi` to specify step-specific overrides.
1398
1271
 
1399
1272
  Parameters
1400
1273
  ----------
1401
- hourly : bool, default False
1402
- Run the workflow hourly.
1403
- daily : bool, default True
1404
- Run the workflow daily.
1405
- weekly : bool, default False
1406
- Run the workflow weekly.
1407
- cron : str, optional, default None
1408
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1409
- specified by this expression.
1410
- timezone : str, optional, default None
1411
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1412
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1274
+ packages : Dict[str, str], default: {}
1275
+ Packages to use for this step. The key is the name of the package
1276
+ and the value is the version to use.
1277
+ python : str, optional, default: None
1278
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1279
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1413
1280
  """
1414
1281
  ...
1415
1282
 
1416
1283
  @typing.overload
1417
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1284
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1418
1285
  ...
1419
1286
 
1420
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1287
+ @typing.overload
1288
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1289
+ ...
1290
+
1291
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1421
1292
  """
1422
- Specifies the times when the flow should be run when running on a
1423
- production scheduler.
1293
+ Specifies the PyPI packages for the step.
1294
+
1295
+ Information in this decorator will augment any
1296
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1297
+ you can use `@pypi_base` to set packages required by all
1298
+ steps and use `@pypi` to specify step-specific overrides.
1424
1299
 
1425
1300
  Parameters
1426
1301
  ----------
1427
- hourly : bool, default False
1428
- Run the workflow hourly.
1429
- daily : bool, default True
1430
- Run the workflow daily.
1431
- weekly : bool, default False
1432
- Run the workflow weekly.
1433
- cron : str, optional, default None
1434
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1435
- specified by this expression.
1436
- timezone : str, optional, default None
1437
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1438
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1302
+ packages : Dict[str, str], default: {}
1303
+ Packages to use for this step. The key is the name of the package
1304
+ and the value is the version to use.
1305
+ python : str, optional, default: None
1306
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1307
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1439
1308
  """
1440
1309
  ...
1441
1310
 
1442
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1311
+ @typing.overload
1312
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1443
1313
  """
1444
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1445
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1314
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1315
+
1316
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1446
1317
 
1447
1318
  Parameters
1448
1319
  ----------
1449
- timeout : int
1450
- Time, in seconds before the task times out and fails. (Default: 3600)
1451
- poke_interval : int
1452
- Time in seconds that the job should wait in between each try. (Default: 60)
1453
- mode : str
1454
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1455
- exponential_backoff : bool
1456
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1457
- pool : str
1458
- the slot pool this task should run in,
1459
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1460
- soft_fail : bool
1461
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1462
- name : str
1463
- Name of the sensor on Airflow
1464
- description : str
1465
- Description of sensor in the Airflow UI
1466
- external_dag_id : str
1467
- The dag_id that contains the task you want to wait for.
1468
- external_task_ids : List[str]
1469
- The list of task_ids that you want to wait for.
1470
- If None (default value) the sensor waits for the DAG. (Default: None)
1471
- allowed_states : List[str]
1472
- Iterable of allowed states, (Default: ['success'])
1473
- failed_states : List[str]
1474
- Iterable of failed or dis-allowed states. (Default: None)
1475
- execution_delta : datetime.timedelta
1476
- time difference with the previous execution to look at,
1477
- the default is the same logical date as the current task or DAG. (Default: None)
1478
- check_existence: bool
1479
- Set to True to check if the external task exists or check if
1480
- the DAG to wait for exists. (Default: True)
1320
+ type : str, default 'default'
1321
+ Card type.
1322
+ id : str, optional, default None
1323
+ If multiple cards are present, use this id to identify this card.
1324
+ options : Dict[str, Any], default {}
1325
+ Options passed to the card. The contents depend on the card type.
1326
+ timeout : int, default 45
1327
+ Interrupt reporting if it takes more than this many seconds.
1328
+
1329
+
1481
1330
  """
1482
1331
  ...
1483
1332
 
1484
1333
  @typing.overload
1485
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1334
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1335
+ ...
1336
+
1337
+ @typing.overload
1338
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1339
+ ...
1340
+
1341
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1486
1342
  """
1487
- Specifies the PyPI packages for all steps of the flow.
1343
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1344
+
1345
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1488
1346
 
1489
- Use `@pypi_base` to set common packages required by all
1490
- steps and use `@pypi` to specify step-specific overrides.
1491
1347
  Parameters
1492
1348
  ----------
1493
- packages : Dict[str, str], default: {}
1494
- Packages to use for this flow. The key is the name of the package
1495
- and the value is the version to use.
1496
- python : str, optional, default: None
1497
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1498
- that the version used will correspond to the version of the Python interpreter used to start the run.
1349
+ type : str, default 'default'
1350
+ Card type.
1351
+ id : str, optional, default None
1352
+ If multiple cards are present, use this id to identify this card.
1353
+ options : Dict[str, Any], default {}
1354
+ Options passed to the card. The contents depend on the card type.
1355
+ timeout : int, default 45
1356
+ Interrupt reporting if it takes more than this many seconds.
1357
+
1358
+
1499
1359
  """
1500
1360
  ...
1501
1361
 
1502
1362
  @typing.overload
1503
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1363
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1364
+ """
1365
+ Specifies environment variables to be set prior to the execution of a step.
1366
+
1367
+ Parameters
1368
+ ----------
1369
+ vars : Dict[str, str], default {}
1370
+ Dictionary of environment variables to set.
1371
+ """
1504
1372
  ...
1505
1373
 
1506
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1374
+ @typing.overload
1375
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1376
+ ...
1377
+
1378
+ @typing.overload
1379
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1380
+ ...
1381
+
1382
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1507
1383
  """
1508
- Specifies the PyPI packages for all steps of the flow.
1384
+ Specifies environment variables to be set prior to the execution of a step.
1509
1385
 
1510
- Use `@pypi_base` to set common packages required by all
1511
- steps and use `@pypi` to specify step-specific overrides.
1512
1386
  Parameters
1513
1387
  ----------
1514
- packages : Dict[str, str], default: {}
1515
- Packages to use for this flow. The key is the name of the package
1516
- and the value is the version to use.
1517
- python : str, optional, default: None
1518
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1519
- that the version used will correspond to the version of the Python interpreter used to start the run.
1388
+ vars : Dict[str, str], default {}
1389
+ Dictionary of environment variables to set.
1520
1390
  """
1521
1391
  ...
1522
1392
 
1523
1393
  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.

@@ -1567,47 +1441,51 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
  ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.

@@ -1615,6 +1493,87 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

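To make the renamed `@trigger_on_finish` flow decorator concrete, here is a sketch of a downstream flow that starts after an upstream flow completes. The flow names are the placeholders from the docstring above; the trigger only takes effect once the flow is deployed to a production orchestrator (for example Argo Workflows), not when run locally.

```
from metaflow import FlowSpec, step, trigger_on_finish


# Runs whenever a FooFlow run in the same project/namespace finishes successfully.
@trigger_on_finish(flow='FooFlow')
class BarFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    BarFlow()
```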
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from root level.
+ When it is specified as a full s3:// URL, leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
+ """
+ ...
+
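A sketch of attaching the `@airflow_s3_key_sensor` described above to a flow compiled with `airflow create`. The bucket key is a placeholder, and the remaining arguments are assumed to fall back to the defaults listed in the docstring:

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step


# The generated Airflow DAG waits for this (hypothetical) key before running start.
@airflow_s3_key_sensor(bucket_key='s3://my-bucket/input/data.csv')
class SensorDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorDemoFlow()
```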
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
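A short sketch of `@pypi_base` pinning shared dependencies for every step of a flow; the package names and version pins are illustrative:

```
from metaflow import FlowSpec, pypi_base, step


@pypi_base(python='3.11.5', packages={'pandas': '2.2.2', 'requests': '2.32.3'})
class PypiDemoFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiDemoFlow()
```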
  def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  This decorator is used to run NIM containers in Metaflow tasks as sidecars.
@@ -1646,6 +1605,24 @@ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[Fl
  """
  ...

+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
+ """
+ ...
+
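A sketch of `@project` grouping related flows under one project namespace; the project name is a placeholder that follows the lowercase/underscore rule above. The branch part of the namespace (e.g. `prod` or `user.bob`) is derived at deploy or run time, which is what the project-aware `@trigger_on_finish` forms shown earlier match against.

```
from metaflow import FlowSpec, project, step


@project(name='demo_analytics')
class ProjectDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectDemoFlow()
```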
  @typing.overload
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1695,31 +1672,10 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...

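Analogous to `@pypi_base`, a minimal sketch of the `@conda_base` flow decorator whose signature appears above; the Python version and package pin are illustrative:

```
from metaflow import FlowSpec, conda_base, step


@conda_base(python='3.10.12', packages={'numpy': '1.26.4'})
class CondaDemoFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # provided by the conda environment
        print(np.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaDemoFlow()
```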
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.

  Parameters
  ----------
@@ -1740,64 +1696,63 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  Name of the sensor on Airflow
  description : str
  Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
  """
  ...

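A sketch of the `@airflow_external_task_sensor` usage introduced above; the DAG id and task id are placeholders, and the remaining arguments are assumed to take the defaults listed in the docstring:

```
from metaflow import FlowSpec, airflow_external_task_sensor, step


# Wait for a task in another (hypothetical) Airflow DAG before the start step runs.
@airflow_external_task_sensor(external_dag_id='upstream_etl',
                              external_task_ids=['load_table'])
class ExternalSensorDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ExternalSensorDemoFlow()
```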
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.

@@ -1806,51 +1761,47 @@ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] =
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.

@@ -1858,6 +1809,55 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

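For contrast with `@trigger_on_finish`, a sketch of the event-based `@trigger`, mapping a field of the event payload onto a flow parameter. The event name, field name, and parameter are placeholders, and the trigger again only applies once the flow is deployed to a production orchestrator:

```
from metaflow import FlowSpec, Parameter, step, trigger


# Fire the flow on event 'data_ready' and map its 'path' field to input_path.
@trigger(event={'name': 'data_ready', 'parameters': {'input_path': 'path'}})
class TriggerDemoFlow(FlowSpec):

    input_path = Parameter('input_path', default='s3://placeholder/none')

    @step
    def start(self):
        print('triggered with', self.input_path)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TriggerDemoFlow()
```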
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
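Finally, a sketch of `@schedule` using the boolean flags documented above; passing `daily=False` alongside `hourly=True` overrides the daily default. The flow name is a placeholder, and the schedule only takes effect when the flow is deployed to a production scheduler:

```
from metaflow import FlowSpec, schedule, step


# Run every hour instead of the default daily cadence when deployed.
@schedule(hourly=True, daily=False)
class ScheduleDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScheduleDemoFlow()
```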
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.