metaflow-stubs 2.12.26__py2.py3-none-any.whl → 2.12.28__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
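If you want to confirm which stub version is actually installed after this upgrade, a quick standard-library check (this snippet is illustrative and not part of the package) is:

```python
# Illustrative check that the installed stubs match the new release.
# `importlib.metadata` ships with Python 3.8+; the argument is the wheel's
# distribution name.
from importlib.metadata import version

print(version("metaflow-stubs"))  # expected to print "2.12.28" after upgrading
```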
Files changed (152)
  1. metaflow-stubs/__init__.pyi +580 -580
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +9 -7
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +6 -6
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/info_file.pyi +2 -2
  14. metaflow-stubs/metadata/metadata.pyi +6 -3
  15. metaflow-stubs/metadata/util.pyi +2 -2
  16. metaflow-stubs/metaflow_config.pyi +2 -2
  17. metaflow-stubs/metaflow_current.pyi +26 -26
  18. metaflow-stubs/mflog/mflog.pyi +2 -2
  19. metaflow-stubs/multicore_utils.pyi +2 -2
  20. metaflow-stubs/parameters.pyi +3 -3
  21. metaflow-stubs/plugins/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow.pyi +4 -4
  24. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +5 -5
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  39. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +4 -4
  47. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  59. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  62. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  63. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  64. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  65. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  66. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  68. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  72. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/basic.pyi +4 -4
  74. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  98. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  100. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +4 -4
  112. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  114. metaflow-stubs/plugins/logs_cli.pyi +2 -2
  115. metaflow-stubs/plugins/package_cli.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +3 -3
  117. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  121. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  124. metaflow-stubs/plugins/resources_decorator.pyi +3 -3
  125. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  128. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  130. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  131. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  132. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  133. metaflow-stubs/procpoll.pyi +2 -2
  134. metaflow-stubs/pylint_wrapper.pyi +2 -2
  135. metaflow-stubs/runner/__init__.pyi +2 -2
  136. metaflow-stubs/runner/deployer.pyi +2 -2
  137. metaflow-stubs/runner/metaflow_runner.pyi +5 -33
  138. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  139. metaflow-stubs/runner/nbrun.pyi +2 -2
  140. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  141. metaflow-stubs/runner/utils.pyi +2 -2
  142. metaflow-stubs/system/__init__.pyi +2 -22
  143. metaflow-stubs/system/system_logger.pyi +2 -12
  144. metaflow-stubs/system/system_monitor.pyi +2 -13
  145. metaflow-stubs/tagging_util.pyi +2 -2
  146. metaflow-stubs/tuple_util.pyi +2 -2
  147. metaflow-stubs/version.pyi +2 -2
  148. {metaflow_stubs-2.12.26.dist-info → metaflow_stubs-2.12.28.dist-info}/METADATA +2 -2
  149. metaflow_stubs-2.12.28.dist-info/RECORD +152 -0
  150. {metaflow_stubs-2.12.26.dist-info → metaflow_stubs-2.12.28.dist-info}/WHEEL +1 -1
  151. metaflow_stubs-2.12.26.dist-info/RECORD +0 -152
  152. {metaflow_stubs-2.12.26.dist-info → metaflow_stubs-2.12.28.dist-info}/top_level.txt +0 -0
@@ -1,24 +1,24 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.26 #
- # Generated on 2024-10-23T21:00:22.846814 #
+ # MF version: 2.12.28 #
+ # Generated on 2024-11-01T10:21:04.434546 #
  ##################################################################################
 
  from __future__ import annotations
 
  import typing
  if typing.TYPE_CHECKING:
- import metaflow.flowspec
- import metaflow.client.core
- import metaflow.parameters
- import metaflow.runner.metaflow_runner
+ import metaflow.metaflow_current
+ import typing
  import datetime
  import io
+ import metaflow.runner.metaflow_runner
+ import metaflow.client.core
+ import metaflow.events
  import metaflow.datastore.inputs
- import typing
+ import metaflow.flowspec
  import metaflow._vendor.click.types
- import metaflow.events
- import metaflow.metaflow_current
+ import metaflow.parameters
  import metaflow.plugins.datatools.s3.s3
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)
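The `FlowSpecDerived` type variable and `StepFlag` flag above are what every decorator overload later in this stub is typed against. As a rough, illustrative sketch (the flow below is invented, not part of the diff), this is the kind of user code the stubs let a type checker validate:

```python
from metaflow import FlowSpec, step


class HelloFlow(FlowSpec):
    @step
    def start(self):
        # Artifacts assigned to `self` are persisted between steps.
        self.message = "hello from the 2.12.28 stubs"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)


if __name__ == "__main__":
    HelloFlow()
```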
@@ -855,133 +855,79 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
855
855
  ...
856
856
 
857
857
  @typing.overload
858
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
859
- """
860
- Creates a human-readable report, a Metaflow Card, after this step completes.
861
-
862
- Note that you may add multiple `@card` decorators in a step with different parameters.
863
-
864
- Parameters
865
- ----------
866
- type : str, default 'default'
867
- Card type.
868
- id : str, optional, default None
869
- If multiple cards are present, use this id to identify this card.
870
- options : Dict[str, Any], default {}
871
- Options passed to the card. The contents depend on the card type.
872
- timeout : int, default 45
873
- Interrupt reporting if it takes more than this many seconds.
874
-
875
-
876
- """
877
- ...
878
-
879
- @typing.overload
880
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
881
- ...
882
-
883
- @typing.overload
884
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
885
- ...
886
-
887
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
858
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
888
859
  """
889
- Creates a human-readable report, a Metaflow Card, after this step completes.
890
-
891
- Note that you may add multiple `@card` decorators in a step with different parameters.
892
-
893
- Parameters
894
- ----------
895
- type : str, default 'default'
896
- Card type.
897
- id : str, optional, default None
898
- If multiple cards are present, use this id to identify this card.
899
- options : Dict[str, Any], default {}
900
- Options passed to the card. The contents depend on the card type.
901
- timeout : int, default 45
902
- Interrupt reporting if it takes more than this many seconds.
860
+ Specifies the resources needed when executing this step.
903
861
 
862
+ Use `@resources` to specify the resource requirements
863
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
904
864
 
905
- """
906
- ...
907
-
908
- @typing.overload
909
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
910
- """
911
- Specifies environment variables to be set prior to the execution of a step.
865
+ You can choose the compute layer on the command line by executing e.g.
866
+ ```
867
+ python myflow.py run --with batch
868
+ ```
869
+ or
870
+ ```
871
+ python myflow.py run --with kubernetes
872
+ ```
873
+ which executes the flow on the desired system using the
874
+ requirements specified in `@resources`.
912
875
 
913
876
  Parameters
914
877
  ----------
915
- vars : Dict[str, str], default {}
916
- Dictionary of environment variables to set.
878
+ cpu : int, default 1
879
+ Number of CPUs required for this step.
880
+ gpu : int, optional, default None
881
+ Number of GPUs required for this step.
882
+ disk : int, optional, default None
883
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
884
+ memory : int, default 4096
885
+ Memory size (in MB) required for this step.
886
+ shared_memory : int, optional, default None
887
+ The value for the size (in MiB) of the /dev/shm volume for this step.
888
+ This parameter maps to the `--shm-size` option in Docker.
917
889
  """
918
890
  ...
919
891
 
920
892
  @typing.overload
921
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
893
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
922
894
  ...
923
895
 
924
896
  @typing.overload
925
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
926
- ...
927
-
928
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
929
- """
930
- Specifies environment variables to be set prior to the execution of a step.
931
-
932
- Parameters
933
- ----------
934
- vars : Dict[str, str], default {}
935
- Dictionary of environment variables to set.
936
- """
897
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
937
898
  ...
938
899
 
939
- @typing.overload
940
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
900
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
941
901
  """
942
- Specifies the PyPI packages for the step.
943
-
944
- Information in this decorator will augment any
945
- attributes set in the `@pyi_base` flow-level decorator. Hence,
946
- you can use `@pypi_base` to set packages required by all
947
- steps and use `@pypi` to specify step-specific overrides.
902
+ Specifies the resources needed when executing this step.
948
903
 
949
- Parameters
950
- ----------
951
- packages : Dict[str, str], default: {}
952
- Packages to use for this step. The key is the name of the package
953
- and the value is the version to use.
954
- python : str, optional, default: None
955
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
956
- that the version used will correspond to the version of the Python interpreter used to start the run.
957
- """
958
- ...
959
-
960
- @typing.overload
961
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
962
- ...
963
-
964
- @typing.overload
965
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
966
- ...
967
-
968
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
969
- """
970
- Specifies the PyPI packages for the step.
904
+ Use `@resources` to specify the resource requirements
905
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
971
906
 
972
- Information in this decorator will augment any
973
- attributes set in the `@pyi_base` flow-level decorator. Hence,
974
- you can use `@pypi_base` to set packages required by all
975
- steps and use `@pypi` to specify step-specific overrides.
907
+ You can choose the compute layer on the command line by executing e.g.
908
+ ```
909
+ python myflow.py run --with batch
910
+ ```
911
+ or
912
+ ```
913
+ python myflow.py run --with kubernetes
914
+ ```
915
+ which executes the flow on the desired system using the
916
+ requirements specified in `@resources`.
976
917
 
977
918
  Parameters
978
919
  ----------
979
- packages : Dict[str, str], default: {}
980
- Packages to use for this step. The key is the name of the package
981
- and the value is the version to use.
982
- python : str, optional, default: None
983
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
984
- that the version used will correspond to the version of the Python interpreter used to start the run.
920
+ cpu : int, default 1
921
+ Number of CPUs required for this step.
922
+ gpu : int, optional, default None
923
+ Number of GPUs required for this step.
924
+ disk : int, optional, default None
925
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
926
+ memory : int, default 4096
927
+ Memory size (in MB) required for this step.
928
+ shared_memory : int, optional, default None
929
+ The value for the size (in MiB) of the /dev/shm volume for this step.
930
+ This parameter maps to the `--shm-size` option in Docker.
985
931
  """
986
932
  ...
987
933
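The `@resources` docstring added above already shows the `--with batch` / `--with kubernetes` run commands; as a hedged sketch of a step declaring those requirements (the flow and step names are invented), it might look like:

```python
from metaflow import FlowSpec, resources, step


class TrainFlow(FlowSpec):
    # Requirements are declared once and honored by whichever compute layer is
    # chosen at run time, e.g. `python trainflow.py run --with kubernetes`.
    # Per the docstring above, `disk` only applies on Kubernetes.
    @resources(cpu=2, memory=8192, disk=20480)
    @step
    def start(self):
        self.status = "trained"
        self.next(self.end)

    @step
    def end(self):
        print(self.status)


if __name__ == "__main__":
    TrainFlow()
```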
 
@@ -1096,84 +1042,51 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
1096
1042
  ...
1097
1043
 
1098
1044
  @typing.overload
1099
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1045
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1100
1046
  """
1101
- Specifies that the step will success under all circumstances.
1047
+ Specifies the PyPI packages for the step.
1102
1048
 
1103
- The decorator will create an optional artifact, specified by `var`, which
1104
- contains the exception raised. You can use it to detect the presence
1105
- of errors, indicating that all happy-path artifacts produced by the step
1106
- are missing.
1049
+ Information in this decorator will augment any
1050
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1051
+ you can use `@pypi_base` to set packages required by all
1052
+ steps and use `@pypi` to specify step-specific overrides.
1107
1053
 
1108
1054
  Parameters
1109
1055
  ----------
1110
- var : str, optional, default None
1111
- Name of the artifact in which to store the caught exception.
1112
- If not specified, the exception is not stored.
1113
- print_exception : bool, default True
1114
- Determines whether or not the exception is printed to
1115
- stdout when caught.
1056
+ packages : Dict[str, str], default: {}
1057
+ Packages to use for this step. The key is the name of the package
1058
+ and the value is the version to use.
1059
+ python : str, optional, default: None
1060
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1061
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1116
1062
  """
1117
1063
  ...
1118
1064
 
1119
1065
  @typing.overload
1120
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1066
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1121
1067
  ...
1122
1068
 
1123
1069
  @typing.overload
1124
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1125
- ...
1126
-
1127
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1128
- """
1129
- Specifies that the step will success under all circumstances.
1130
-
1131
- The decorator will create an optional artifact, specified by `var`, which
1132
- contains the exception raised. You can use it to detect the presence
1133
- of errors, indicating that all happy-path artifacts produced by the step
1134
- are missing.
1135
-
1136
- Parameters
1137
- ----------
1138
- var : str, optional, default None
1139
- Name of the artifact in which to store the caught exception.
1140
- If not specified, the exception is not stored.
1141
- print_exception : bool, default True
1142
- Determines whether or not the exception is printed to
1143
- stdout when caught.
1144
- """
1070
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1145
1071
  ...
1146
1072
 
1147
- @typing.overload
1148
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1073
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1149
1074
  """
1150
- Specifies secrets to be retrieved and injected as environment variables prior to
1151
- the execution of a step.
1075
+ Specifies the PyPI packages for the step.
1152
1076
 
1153
- Parameters
1154
- ----------
1155
- sources : List[Union[str, Dict[str, Any]]], default: []
1156
- List of secret specs, defining how the secrets are to be retrieved
1157
- """
1158
- ...
1159
-
1160
- @typing.overload
1161
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1162
- ...
1163
-
1164
- @typing.overload
1165
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1166
- ...
1167
-
1168
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1169
- """
1170
- Specifies secrets to be retrieved and injected as environment variables prior to
1171
- the execution of a step.
1077
+ Information in this decorator will augment any
1078
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1079
+ you can use `@pypi_base` to set packages required by all
1080
+ steps and use `@pypi` to specify step-specific overrides.
1172
1081
 
1173
1082
  Parameters
1174
1083
  ----------
1175
- sources : List[Union[str, Dict[str, Any]]], default: []
1176
- List of secret specs, defining how the secrets are to be retrieved
1084
+ packages : Dict[str, str], default: {}
1085
+ Packages to use for this step. The key is the name of the package
1086
+ and the value is the version to use.
1087
+ python : str, optional, default: None
1088
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1089
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1177
1090
  """
1178
1091
  ...
1179
1092
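As a minimal sketch of the `@pypi` usage described in the docstring above (the package pin, Python version, and URL are illustrative values only):

```python
from metaflow import FlowSpec, pypi, step


class FetchFlow(FlowSpec):
    # Step-specific override, augmenting anything set via `@pypi_base`.
    @pypi(packages={"requests": "2.32.3"}, python="3.11.5")
    @step
    def start(self):
        import requests  # resolved inside the step's isolated PyPI environment

        self.status = requests.get("https://api.github.com").status_code
        self.next(self.end)

    @step
    def end(self):
        print(self.status)


if __name__ == "__main__":
    FetchFlow()
```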
 
@@ -1196,209 +1109,6 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
1196
1109
  """
1197
1110
  ...
1198
1111
 
1199
- @typing.overload
1200
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1201
- """
1202
- Specifies the Conda environment for the step.
1203
-
1204
- Information in this decorator will augment any
1205
- attributes set in the `@conda_base` flow-level decorator. Hence,
1206
- you can use `@conda_base` to set packages required by all
1207
- steps and use `@conda` to specify step-specific overrides.
1208
-
1209
- Parameters
1210
- ----------
1211
- packages : Dict[str, str], default {}
1212
- Packages to use for this step. The key is the name of the package
1213
- and the value is the version to use.
1214
- libraries : Dict[str, str], default {}
1215
- Supported for backward compatibility. When used with packages, packages will take precedence.
1216
- python : str, optional, default None
1217
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1218
- that the version used will correspond to the version of the Python interpreter used to start the run.
1219
- disabled : bool, default False
1220
- If set to True, disables @conda.
1221
- """
1222
- ...
1223
-
1224
- @typing.overload
1225
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1226
- ...
1227
-
1228
- @typing.overload
1229
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1230
- ...
1231
-
1232
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1233
- """
1234
- Specifies the Conda environment for the step.
1235
-
1236
- Information in this decorator will augment any
1237
- attributes set in the `@conda_base` flow-level decorator. Hence,
1238
- you can use `@conda_base` to set packages required by all
1239
- steps and use `@conda` to specify step-specific overrides.
1240
-
1241
- Parameters
1242
- ----------
1243
- packages : Dict[str, str], default {}
1244
- Packages to use for this step. The key is the name of the package
1245
- and the value is the version to use.
1246
- libraries : Dict[str, str], default {}
1247
- Supported for backward compatibility. When used with packages, packages will take precedence.
1248
- python : str, optional, default None
1249
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1250
- that the version used will correspond to the version of the Python interpreter used to start the run.
1251
- disabled : bool, default False
1252
- If set to True, disables @conda.
1253
- """
1254
- ...
1255
-
1256
- @typing.overload
1257
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1258
- """
1259
- Specifies the resources needed when executing this step.
1260
-
1261
- Use `@resources` to specify the resource requirements
1262
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1263
-
1264
- You can choose the compute layer on the command line by executing e.g.
1265
- ```
1266
- python myflow.py run --with batch
1267
- ```
1268
- or
1269
- ```
1270
- python myflow.py run --with kubernetes
1271
- ```
1272
- which executes the flow on the desired system using the
1273
- requirements specified in `@resources`.
1274
-
1275
- Parameters
1276
- ----------
1277
- cpu : int, default 1
1278
- Number of CPUs required for this step.
1279
- gpu : int, default 0
1280
- Number of GPUs required for this step.
1281
- disk : int, optional, default None
1282
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1283
- memory : int, default 4096
1284
- Memory size (in MB) required for this step.
1285
- shared_memory : int, optional, default None
1286
- The value for the size (in MiB) of the /dev/shm volume for this step.
1287
- This parameter maps to the `--shm-size` option in Docker.
1288
- """
1289
- ...
1290
-
1291
- @typing.overload
1292
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1293
- ...
1294
-
1295
- @typing.overload
1296
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1297
- ...
1298
-
1299
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1300
- """
1301
- Specifies the resources needed when executing this step.
1302
-
1303
- Use `@resources` to specify the resource requirements
1304
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1305
-
1306
- You can choose the compute layer on the command line by executing e.g.
1307
- ```
1308
- python myflow.py run --with batch
1309
- ```
1310
- or
1311
- ```
1312
- python myflow.py run --with kubernetes
1313
- ```
1314
- which executes the flow on the desired system using the
1315
- requirements specified in `@resources`.
1316
-
1317
- Parameters
1318
- ----------
1319
- cpu : int, default 1
1320
- Number of CPUs required for this step.
1321
- gpu : int, default 0
1322
- Number of GPUs required for this step.
1323
- disk : int, optional, default None
1324
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1325
- memory : int, default 4096
1326
- Memory size (in MB) required for this step.
1327
- shared_memory : int, optional, default None
1328
- The value for the size (in MiB) of the /dev/shm volume for this step.
1329
- This parameter maps to the `--shm-size` option in Docker.
1330
- """
1331
- ...
1332
-
1333
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1334
- """
1335
- Specifies that this step should execute on Kubernetes.
1336
-
1337
- Parameters
1338
- ----------
1339
- cpu : int, default 1
1340
- Number of CPUs required for this step. If `@resources` is
1341
- also present, the maximum value from all decorators is used.
1342
- memory : int, default 4096
1343
- Memory size (in MB) required for this step. If
1344
- `@resources` is also present, the maximum value from all decorators is
1345
- used.
1346
- disk : int, default 10240
1347
- Disk size (in MB) required for this step. If
1348
- `@resources` is also present, the maximum value from all decorators is
1349
- used.
1350
- image : str, optional, default None
1351
- Docker image to use when launching on Kubernetes. If not specified, and
1352
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1353
- not, a default Docker image mapping to the current version of Python is used.
1354
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1355
- If given, the imagePullPolicy to be applied to the Docker image of the step.
1356
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1357
- Kubernetes service account to use when launching pod in Kubernetes.
1358
- secrets : List[str], optional, default None
1359
- Kubernetes secrets to use when launching pod in Kubernetes. These
1360
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1361
- in Metaflow configuration.
1362
- node_selector: Union[Dict[str,str], str], optional, default None
1363
- Kubernetes node selector(s) to apply to the pod running the task.
1364
- Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
1365
- or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
1366
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1367
- Kubernetes namespace to use when launching pod in Kubernetes.
1368
- gpu : int, optional, default None
1369
- Number of GPUs required for this step. A value of zero implies that
1370
- the scheduled node should not have GPUs.
1371
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1372
- The vendor of the GPUs to be used for this step.
1373
- tolerations : List[str], default []
1374
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1375
- Kubernetes tolerations to use when launching pod in Kubernetes.
1376
- use_tmpfs : bool, default False
1377
- This enables an explicit tmpfs mount for this step.
1378
- tmpfs_tempdir : bool, default True
1379
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1380
- tmpfs_size : int, optional, default: None
1381
- The value for the size (in MiB) of the tmpfs mount for this step.
1382
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1383
- memory allocated for this step.
1384
- tmpfs_path : str, optional, default /metaflow_temp
1385
- Path to tmpfs mount for this step.
1386
- persistent_volume_claims : Dict[str, str], optional, default None
1387
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1388
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1389
- shared_memory: int, optional
1390
- Shared memory size (in MiB) required for this step
1391
- port: int, optional
1392
- Port number to specify in the Kubernetes job object
1393
- compute_pool : str, optional, default None
1394
- Compute pool to be used for for this step.
1395
- If not specified, any accessible compute pool within the perimeter is used.
1396
- hostname_resolution_timeout: int, default 10 * 60
1397
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
1398
- Only applicable when @parallel is used.
1399
- """
1400
- ...
1401
-
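The `@kubernetes` docstring removed here reappears later in the new stub (the decorator itself is unchanged, only relocated). A minimal sketch of the usage it describes, assuming the namespace below is a placeholder and unset parameters fall back to the `METAFLOW_KUBERNETES_*` configuration defaults named in the docstring:

```python
from metaflow import FlowSpec, kubernetes, step


class K8sFlow(FlowSpec):
    # "ml-jobs" is a placeholder namespace for illustration only.
    @kubernetes(cpu=2, memory=8192, disk=20480, namespace="ml-jobs")
    @step
    def start(self):
        self.status = "ran on a pod"
        self.next(self.end)

    @step
    def end(self):
        print(self.status)


if __name__ == "__main__":
    K8sFlow()
```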
1402
1112
  @typing.overload
1403
1113
  def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1404
1114
  """
@@ -1547,104 +1257,302 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1547
1257
  ...
1548
1258
 
1549
1259
  @typing.overload
1550
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1260
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1551
1261
  """
1552
- Specifies the event(s) that this flow depends on.
1553
-
1554
- ```
1555
- @trigger(event='foo')
1556
- ```
1557
- or
1558
- ```
1559
- @trigger(events=['foo', 'bar'])
1560
- ```
1561
-
1562
- Additionally, you can specify the parameter mappings
1563
- to map event payload to Metaflow parameters for the flow.
1564
- ```
1565
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1566
- ```
1567
- or
1568
- ```
1569
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1570
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1571
- ```
1262
+ Specifies that the step will success under all circumstances.
1572
1263
 
1573
- 'parameters' can also be a list of strings and tuples like so:
1574
- ```
1575
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1576
- ```
1577
- This is equivalent to:
1578
- ```
1579
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1580
- ```
1264
+ The decorator will create an optional artifact, specified by `var`, which
1265
+ contains the exception raised. You can use it to detect the presence
1266
+ of errors, indicating that all happy-path artifacts produced by the step
1267
+ are missing.
1581
1268
 
1582
1269
  Parameters
1583
1270
  ----------
1584
- event : Union[str, Dict[str, Any]], optional, default None
1585
- Event dependency for this flow.
1586
- events : List[Union[str, Dict[str, Any]]], default []
1587
- Events dependency for this flow.
1588
- options : Dict[str, Any], default {}
1589
- Backend-specific configuration for tuning eventing behavior.
1590
-
1591
-
1271
+ var : str, optional, default None
1272
+ Name of the artifact in which to store the caught exception.
1273
+ If not specified, the exception is not stored.
1274
+ print_exception : bool, default True
1275
+ Determines whether or not the exception is printed to
1276
+ stdout when caught.
1592
1277
  """
1593
1278
  ...
1594
1279
 
1595
1280
  @typing.overload
1596
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1281
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1597
1282
  ...
1598
1283
 
1599
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1600
- """
1601
- Specifies the event(s) that this flow depends on.
1602
-
1603
- ```
1604
- @trigger(event='foo')
1605
- ```
1606
- or
1607
- ```
1608
- @trigger(events=['foo', 'bar'])
1609
- ```
1284
+ @typing.overload
1285
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1286
+ ...
1287
+
1288
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1289
+ """
1290
+ Specifies that the step will success under all circumstances.
1610
1291
 
1611
- Additionally, you can specify the parameter mappings
1612
- to map event payload to Metaflow parameters for the flow.
1613
- ```
1614
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1615
- ```
1616
- or
1617
- ```
1618
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1619
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1620
- ```
1292
+ The decorator will create an optional artifact, specified by `var`, which
1293
+ contains the exception raised. You can use it to detect the presence
1294
+ of errors, indicating that all happy-path artifacts produced by the step
1295
+ are missing.
1621
1296
 
1622
- 'parameters' can also be a list of strings and tuples like so:
1623
- ```
1624
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1625
- ```
1626
- This is equivalent to:
1627
- ```
1628
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1629
- ```
1297
+ Parameters
1298
+ ----------
1299
+ var : str, optional, default None
1300
+ Name of the artifact in which to store the caught exception.
1301
+ If not specified, the exception is not stored.
1302
+ print_exception : bool, default True
1303
+ Determines whether or not the exception is printed to
1304
+ stdout when caught.
1305
+ """
1306
+ ...
1307
+
1308
+ @typing.overload
1309
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1310
+ """
1311
+ Specifies environment variables to be set prior to the execution of a step.
1630
1312
 
1631
1313
  Parameters
1632
1314
  ----------
1633
- event : Union[str, Dict[str, Any]], optional, default None
1634
- Event dependency for this flow.
1635
- events : List[Union[str, Dict[str, Any]]], default []
1636
- Events dependency for this flow.
1315
+ vars : Dict[str, str], default {}
1316
+ Dictionary of environment variables to set.
1317
+ """
1318
+ ...
1319
+
1320
+ @typing.overload
1321
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1322
+ ...
1323
+
1324
+ @typing.overload
1325
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1326
+ ...
1327
+
1328
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1329
+ """
1330
+ Specifies environment variables to be set prior to the execution of a step.
1331
+
1332
+ Parameters
1333
+ ----------
1334
+ vars : Dict[str, str], default {}
1335
+ Dictionary of environment variables to set.
1336
+ """
1337
+ ...
1338
+
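And a correspondingly small sketch of `@environment` as documented above (the bucket URL is a placeholder value):

```python
import os

from metaflow import FlowSpec, environment, step


class EnvFlow(FlowSpec):
    # The variable is set before the step body executes.
    @environment(vars={"DATA_BUCKET": "s3://example-bucket/input"})
    @step
    def start(self):
        self.bucket = os.environ["DATA_BUCKET"]
        self.next(self.end)

    @step
    def end(self):
        print(self.bucket)


if __name__ == "__main__":
    EnvFlow()
```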
1339
+ @typing.overload
1340
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1341
+ """
1342
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1343
+
1344
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1345
+
1346
+ Parameters
1347
+ ----------
1348
+ type : str, default 'default'
1349
+ Card type.
1350
+ id : str, optional, default None
1351
+ If multiple cards are present, use this id to identify this card.
1637
1352
  options : Dict[str, Any], default {}
1638
- Backend-specific configuration for tuning eventing behavior.
1353
+ Options passed to the card. The contents depend on the card type.
1354
+ timeout : int, default 45
1355
+ Interrupt reporting if it takes more than this many seconds.
1639
1356
 
1640
1357
 
1641
1358
  """
1642
1359
  ...
1643
1360
 
1644
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1361
+ @typing.overload
1362
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1363
+ ...
1364
+
1365
+ @typing.overload
1366
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1367
+ ...
1368
+
1369
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1645
1370
  """
1646
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1647
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1371
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1372
+
1373
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1374
+
1375
+ Parameters
1376
+ ----------
1377
+ type : str, default 'default'
1378
+ Card type.
1379
+ id : str, optional, default None
1380
+ If multiple cards are present, use this id to identify this card.
1381
+ options : Dict[str, Any], default {}
1382
+ Options passed to the card. The contents depend on the card type.
1383
+ timeout : int, default 45
1384
+ Interrupt reporting if it takes more than this many seconds.
1385
+
1386
+
1387
+ """
1388
+ ...
1389
+
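For `@card`, a hedged sketch assuming the default card type and the `current.card` / `metaflow.cards.Markdown` component API (names of the flow and card id are invented):

```python
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown


class ReportFlow(FlowSpec):
    # `id="summary"` lets `current.card` address this card when several
    # `@card` decorators are attached to the same step.
    @card(type="default", id="summary", timeout=45)
    @step
    def start(self):
        current.card["summary"].append(Markdown("# Run summary\n*All good.*"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ReportFlow()
```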
1390
+ @typing.overload
1391
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1392
+ """
1393
+ Specifies secrets to be retrieved and injected as environment variables prior to
1394
+ the execution of a step.
1395
+
1396
+ Parameters
1397
+ ----------
1398
+ sources : List[Union[str, Dict[str, Any]]], default: []
1399
+ List of secret specs, defining how the secrets are to be retrieved
1400
+ """
1401
+ ...
1402
+
1403
+ @typing.overload
1404
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1405
+ ...
1406
+
1407
+ @typing.overload
1408
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1409
+ ...
1410
+
1411
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1412
+ """
1413
+ Specifies secrets to be retrieved and injected as environment variables prior to
1414
+ the execution of a step.
1415
+
1416
+ Parameters
1417
+ ----------
1418
+ sources : List[Union[str, Dict[str, Any]]], default: []
1419
+ List of secret specs, defining how the secrets are to be retrieved
1420
+ """
1421
+ ...
1422
+
1423
+ @typing.overload
1424
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1425
+ """
1426
+ Specifies the Conda environment for the step.
1427
+
1428
+ Information in this decorator will augment any
1429
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1430
+ you can use `@conda_base` to set packages required by all
1431
+ steps and use `@conda` to specify step-specific overrides.
1432
+
1433
+ Parameters
1434
+ ----------
1435
+ packages : Dict[str, str], default {}
1436
+ Packages to use for this step. The key is the name of the package
1437
+ and the value is the version to use.
1438
+ libraries : Dict[str, str], default {}
1439
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1440
+ python : str, optional, default None
1441
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1442
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1443
+ disabled : bool, default False
1444
+ If set to True, disables @conda.
1445
+ """
1446
+ ...
1447
+
1448
+ @typing.overload
1449
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1450
+ ...
1451
+
1452
+ @typing.overload
1453
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1454
+ ...
1455
+
1456
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1457
+ """
1458
+ Specifies the Conda environment for the step.
1459
+
1460
+ Information in this decorator will augment any
1461
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1462
+ you can use `@conda_base` to set packages required by all
1463
+ steps and use `@conda` to specify step-specific overrides.
1464
+
1465
+ Parameters
1466
+ ----------
1467
+ packages : Dict[str, str], default {}
1468
+ Packages to use for this step. The key is the name of the package
1469
+ and the value is the version to use.
1470
+ libraries : Dict[str, str], default {}
1471
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1472
+ python : str, optional, default None
1473
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1474
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1475
+ disabled : bool, default False
1476
+ If set to True, disables @conda.
1477
+ """
1478
+ ...
1479
+
1480
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1481
+ """
1482
+ Specifies that this step should execute on Kubernetes.
1483
+
1484
+ Parameters
1485
+ ----------
1486
+ cpu : int, default 1
1487
+ Number of CPUs required for this step. If `@resources` is
1488
+ also present, the maximum value from all decorators is used.
1489
+ memory : int, default 4096
1490
+ Memory size (in MB) required for this step. If
1491
+ `@resources` is also present, the maximum value from all decorators is
1492
+ used.
1493
+ disk : int, default 10240
1494
+ Disk size (in MB) required for this step. If
1495
+ `@resources` is also present, the maximum value from all decorators is
1496
+ used.
1497
+ image : str, optional, default None
1498
+ Docker image to use when launching on Kubernetes. If not specified, and
1499
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1500
+ not, a default Docker image mapping to the current version of Python is used.
1501
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1502
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
1503
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1504
+ Kubernetes service account to use when launching pod in Kubernetes.
1505
+ secrets : List[str], optional, default None
1506
+ Kubernetes secrets to use when launching pod in Kubernetes. These
1507
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1508
+ in Metaflow configuration.
1509
+ node_selector: Union[Dict[str,str], str], optional, default None
1510
+ Kubernetes node selector(s) to apply to the pod running the task.
1511
+ Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
1512
+ or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
1513
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1514
+ Kubernetes namespace to use when launching pod in Kubernetes.
1515
+ gpu : int, optional, default None
1516
+ Number of GPUs required for this step. A value of zero implies that
1517
+ the scheduled node should not have GPUs.
1518
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1519
+ The vendor of the GPUs to be used for this step.
1520
+ tolerations : List[str], default []
1521
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1522
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1523
+ use_tmpfs : bool, default False
1524
+ This enables an explicit tmpfs mount for this step.
1525
+ tmpfs_tempdir : bool, default True
1526
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1527
+ tmpfs_size : int, optional, default: None
1528
+ The value for the size (in MiB) of the tmpfs mount for this step.
1529
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1530
+ memory allocated for this step.
1531
+ tmpfs_path : str, optional, default /metaflow_temp
1532
+ Path to tmpfs mount for this step.
1533
+ persistent_volume_claims : Dict[str, str], optional, default None
1534
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1535
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1536
+ shared_memory: int, optional
1537
+ Shared memory size (in MiB) required for this step
1538
+ port: int, optional
1539
+ Port number to specify in the Kubernetes job object
1540
+ compute_pool : str, optional, default None
1541
+ Compute pool to be used for for this step.
1542
+ If not specified, any accessible compute pool within the perimeter is used.
1543
+ hostname_resolution_timeout: int, default 10 * 60
1544
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
1545
+ Only applicable when @parallel is used.
1546
+ """
1547
+ ...
1548
+
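A sketch of how `@kubernetes` might be applied to a step; the resource figures and image below are placeholders, and the interplay with `@resources` follows the max-value rule described in the docstring above:

```
from metaflow import FlowSpec, kubernetes, resources, step

class K8sDemoFlow(FlowSpec):

    @resources(cpu=2, memory=8192)           # generic resource request
    @kubernetes(cpu=1, memory=4096,          # when both decorators are present,
                image="python:3.10-slim")    # the larger value per resource is used
    @step
    def start(self):
        print("running inside a Kubernetes pod")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sDemoFlow()
```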
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.
 
  Parameters
  ----------
@@ -1665,137 +1573,92 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  Name of the sensor on Airflow
  description : str
  Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
  """
  ...
 
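A sketch of attaching this sensor to a flow destined for Airflow. The bucket and key are illustrative, and the implementation is assumed to supply defaults for most of these arguments even though the stub signature above lists them all:

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_daily_drop",
    description="Block `start` until the daily input lands in S3",
    bucket_key="s3://example-bucket/input/daily.csv",  # full s3:// URL, so bucket_name stays None
    bucket_name=None,
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class S3SensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3SensorFlow()
```

Compiled with something like `python s3_sensor_flow.py airflow create s3_sensor_dag.py`, the generated DAG gains an S3KeySensor task ahead of `start`.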
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies what flows belong to the same project.
 
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
 
  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
  """
  ...
 
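A brief sketch of `@project`; the project name is illustrative:

```
from metaflow import FlowSpec, project, step

@project(name="fraud_detection")  # lowercase alphanumerics and underscores only
class TrainingFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TrainingFlow()
```

Other flows decorated with the same name end up in the same project-specific namespace on the production scheduler.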
  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
 
  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
 
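A flow-level sketch of `@conda_base` on its own; per the docstring above, the legacy `libraries` argument is accepted but loses to `packages` when both are given. The version pins are placeholders:

```
from metaflow import FlowSpec, conda_base, step

@conda_base(
    python="3.9.18",
    packages={"scikit-learn": "1.4.2"},   # preferred spelling
    libraries={"scikit-learn": "1.0.2"},  # legacy alias; packages takes precedence here
)
class SharedEnvFlow(FlowSpec):

    @step
    def start(self):
        import sklearn
        print(sklearn.__version__)  # expected to reflect the packages pin
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SharedEnvFlow()
```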
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
 
  Parameters
  ----------
@@ -1816,18 +1679,21 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  Name of the sensor on Airflow
  description : str
  Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states, (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ time difference with the previous execution to look at,
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
  """
  ...
 
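A sketch of the external-task sensor attached to a flow; the upstream DAG and task ids are illustrative, and the implementation is assumed to provide defaults for most of these arguments:

```
import datetime
from metaflow import FlowSpec, airflow_external_task_sensor, step

@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=120,
    mode="poke",
    exponential_backoff=False,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_upstream_etl",
    description="Hold `start` until the nightly ETL DAG finishes",
    external_dag_id="nightly_etl",           # upstream Airflow DAG (illustrative)
    external_task_ids=["load_warehouse"],    # wait for a specific task; None would wait on the whole DAG
    allowed_states=["success"],
    failed_states=None,
    execution_delta=None,                    # or e.g. datetime.timedelta(hours=1)
    check_existence=True,
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```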
@@ -1935,51 +1801,185 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  ...
 
  @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies the event(s) that this flow depends on.
 
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
+ """
+ ...
+
+ @typing.overload
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
+
+ Parameters
+ ----------
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
+ """
+ ...
+
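A sketch of event-triggered execution with `@trigger`, using the mapping syntax from the docstring above; the event name, event field, and flow parameter are illustrative:

```
from metaflow import FlowSpec, Parameter, step, trigger

@trigger(event={"name": "data_refreshed",
                "parameters": {"table": "table_name"}})  # flow parameter 'table' is filled from event field 'table_name'
class RefreshFlow(FlowSpec):

    table = Parameter("table", default="raw_events")

    @step
    def start(self):
        print("triggered for table", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    RefreshFlow()
```

On a deployment target that supports eventing, such as Argo Workflows, a deployed copy of this flow starts whenever a `data_refreshed` event is published.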
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
  """
  ...
 
  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ """
+ ...
+
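A sketch pairing `@pypi_base` with a step-level `@pypi` override, mirroring the flow/step split described above; the package pins are placeholders:

```
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(python="3.11.5", packages={"requests": "2.32.3"})  # flow-wide PyPI baseline (illustrative pins)
class PypiDemoFlow(FlowSpec):

    @pypi(packages={"rich": "13.7.1"})  # step-specific addition to the flow-wide set
    @step
    def start(self):
        import requests  # both resolved from the step's PyPI environment
        import rich
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiDemoFlow()
```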
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
 
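A sketch of `@schedule` with a cron expression and timezone, both illustrative; the decorator only takes effect once the flow is deployed to a production scheduler, and the exact cron dialect expected depends on that scheduler:

```
from metaflow import FlowSpec, schedule, step

@schedule(cron="0 6 * * *", timezone="Europe/Berlin")  # roughly: 06:00 daily; timezone honored on Argo Workflows
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```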
@@ -2074,7 +2074,7 @@ class Metaflow(object, metaclass=type):
  flows present in the current namespace will be returned. A `Flow` is present in a namespace
  if it has at least one run in the namespace.
  """
- def __init__(self):
+ def __init__(self, _current_metadata: typing.Optional[str] = None):
  ...
  @property
  def flows(self) -> typing.List[metaflow.client.core.Flow]:
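For context on the signature change above: the new `_current_metadata` argument is optional, so existing call sites keep working. A minimal sketch of the client entry point, with the metadata provider left to be resolved from the environment:

```
from metaflow import Metaflow, namespace

namespace(None)        # inspect all namespaces rather than the personal default
mf = Metaflow()        # unchanged call site; metadata provider resolved from the environment
for flow in mf.flows:  # Flow objects for every flow with at least one visible run
    print(flow.id)
```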