metaflow-stubs 2.12.27__py2.py3-none-any.whl → 2.12.28__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (152)
  1. metaflow-stubs/__init__.pyi +582 -582
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/info_file.pyi +2 -2
  14. metaflow-stubs/metadata/metadata.pyi +2 -2
  15. metaflow-stubs/metadata/util.pyi +2 -2
  16. metaflow-stubs/metaflow_config.pyi +2 -2
  17. metaflow-stubs/metaflow_current.pyi +5 -5
  18. metaflow-stubs/mflog/mflog.pyi +2 -2
  19. metaflow-stubs/multicore_utils.pyi +2 -2
  20. metaflow-stubs/parameters.pyi +4 -4
  21. metaflow-stubs/plugins/__init__.pyi +3 -3
  22. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow.pyi +4 -4
  24. metaflow-stubs/plugins/airflow/airflow_cli.pyi +4 -4
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -5
  36. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +5 -5
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +5 -5
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  39. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  47. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  49. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  59. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  62. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  63. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  64. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  65. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  66. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  68. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +4 -4
  88. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  100. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  102. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  109. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  112. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  114. metaflow-stubs/plugins/logs_cli.pyi +4 -4
  115. metaflow-stubs/plugins/package_cli.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  121. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  123. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  124. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  128. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  130. metaflow-stubs/plugins/tag_cli.pyi +3 -3
  131. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  133. metaflow-stubs/procpoll.pyi +2 -2
  134. metaflow-stubs/pylint_wrapper.pyi +2 -2
  135. metaflow-stubs/runner/__init__.pyi +2 -2
  136. metaflow-stubs/runner/deployer.pyi +3 -3
  137. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  138. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  139. metaflow-stubs/runner/nbrun.pyi +2 -2
  140. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  141. metaflow-stubs/runner/utils.pyi +2 -2
  142. metaflow-stubs/system/__init__.pyi +3 -23
  143. metaflow-stubs/system/system_logger.pyi +3 -13
  144. metaflow-stubs/system/system_monitor.pyi +2 -13
  145. metaflow-stubs/tagging_util.pyi +2 -2
  146. metaflow-stubs/tuple_util.pyi +2 -2
  147. metaflow-stubs/version.pyi +2 -2
  148. {metaflow_stubs-2.12.27.dist-info → metaflow_stubs-2.12.28.dist-info}/METADATA +2 -2
  149. metaflow_stubs-2.12.28.dist-info/RECORD +152 -0
  150. {metaflow_stubs-2.12.27.dist-info → metaflow_stubs-2.12.28.dist-info}/WHEEL +1 -1
  151. metaflow_stubs-2.12.27.dist-info/RECORD +0 -152
  152. {metaflow_stubs-2.12.27.dist-info → metaflow_stubs-2.12.28.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.27 #
- # Generated on 2024-10-24T19:27:17.873106 #
+ # MF version: 2.12.28 #
+ # Generated on 2024-11-01T10:21:04.434546 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow.datastore.inputs
- import metaflow.flowspec
- import metaflow.events
- import metaflow.client.core
- import metaflow._vendor.click.types
+ import metaflow.metaflow_current
  import typing
  import datetime
+ import io
  import metaflow.runner.metaflow_runner
- import metaflow.plugins.datatools.s3.s3
+ import metaflow.client.core
+ import metaflow.events
+ import metaflow.datastore.inputs
+ import metaflow.flowspec
+ import metaflow._vendor.click.types
  import metaflow.parameters
- import metaflow.metaflow_current
- import io
+ import metaflow.plugins.datatools.s3.s3
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -855,190 +855,132 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the resources needed when executing this step.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the PyPI packages for the step.
+ Specifies the resources needed when executing this step.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
- or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

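The `@resources` and `@retry` stubs above only describe signatures and parameters. As a rough, hypothetical illustration (the flow name and the numbers below are made up, not taken from the package), a step combining the two decorators as the docstrings describe might look like this:

```python
from metaflow import FlowSpec, step, resources, retry


class TrainFlow(FlowSpec):

    # Resource needs are declared independently of the compute layer; the layer
    # is picked at run time, e.g. `run --with batch` or `run --with kubernetes`.
    @resources(cpu=4, memory=16384)
    # Retry transient failures; a step with unsafe-to-repeat side effects would use times=0.
    @retry(times=3, minutes_between_retries=2)
    @step
    def start(self):
        self.result = "ok"
        self.next(self.end)

    @step
    def end(self):
        print(self.result)


if __name__ == "__main__":
    TrainFlow()
```

Run, for example, as `python trainflow.py run --with kubernetes` to apply the declared requirements on the chosen compute layer, as the `@resources` docstring notes.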
@@ -1100,163 +1042,70 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
+ Specifies the PyPI packages for the step.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the PyPI packages for the step.

- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

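Per the `@pypi` docstring shown in this hunk, step-level settings augment a flow-level `@pypi_base`. A minimal sketch of that split, with hypothetical package pins, versions, and flow name (not taken from the package):

```python
from metaflow import FlowSpec, step, pypi, pypi_base


@pypi_base(python="3.10.4", packages={"requests": "2.31.0"})  # shared by all steps
class DepsFlow(FlowSpec):

    @pypi(packages={"pandas": "2.2.0"})  # step-specific addition/override
    @step
    def start(self):
        import pandas as pd  # resolved from this step's PyPI environment
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.rows)


if __name__ == "__main__":
    DepsFlow()
```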
@@ -1407,96 +1256,6 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
  @typing.overload
  def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -1546,63 +1305,244 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1546
1305
  """
1547
1306
  ...
1548
1307
 
1549
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1308
+ @typing.overload
1309
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1550
1310
  """
1551
- Specifies what flows belong to the same project.
1552
-
1553
- A project-specific namespace is created for all flows that
1554
- use the same `@project(name)`.
1311
+ Specifies environment variables to be set prior to the execution of a step.
1555
1312
 
1556
1313
  Parameters
1557
1314
  ----------
1558
- name : str
1559
- Project name. Make sure that the name is unique amongst all
1560
- projects that use the same production scheduler. The name may
1561
- contain only lowercase alphanumeric characters and underscores.
1562
-
1563
-
1315
+ vars : Dict[str, str], default {}
1316
+ Dictionary of environment variables to set.
1564
1317
  """
1565
1318
  ...
1566
1319
 
1567
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1568
- """
1569
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1570
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1571
-
1572
- Parameters
1573
- ----------
1574
- timeout : int
1575
- Time, in seconds before the task times out and fails. (Default: 3600)
1576
- poke_interval : int
1577
- Time in seconds that the job should wait in between each try. (Default: 60)
1578
- mode : str
1579
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1580
- exponential_backoff : bool
1581
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1582
- pool : str
1583
- the slot pool this task should run in,
1584
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1585
- soft_fail : bool
1586
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1587
- name : str
1588
- Name of the sensor on Airflow
1589
- description : str
1590
- Description of sensor in the Airflow UI
1591
- external_dag_id : str
1592
- The dag_id that contains the task you want to wait for.
1593
- external_task_ids : List[str]
1594
- The list of task_ids that you want to wait for.
1595
- If None (default value) the sensor waits for the DAG. (Default: None)
1596
- allowed_states : List[str]
1597
- Iterable of allowed states, (Default: ['success'])
1598
- failed_states : List[str]
1599
- Iterable of failed or dis-allowed states. (Default: None)
1600
- execution_delta : datetime.timedelta
1601
- time difference with the previous execution to look at,
1602
- the default is the same logical date as the current task or DAG. (Default: None)
1603
- check_existence: bool
1604
- Set to True to check if the external task exists or check if
1605
- the DAG to wait for exists. (Default: True)
1320
+ @typing.overload
1321
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1322
+ ...
1323
+
1324
+ @typing.overload
1325
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1326
+ ...
1327
+
1328
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1329
+ """
1330
+ Specifies environment variables to be set prior to the execution of a step.
1331
+
1332
+ Parameters
1333
+ ----------
1334
+ vars : Dict[str, str], default {}
1335
+ Dictionary of environment variables to set.
1336
+ """
1337
+ ...
1338
+
1339
+ @typing.overload
1340
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1341
+ """
1342
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1343
+
1344
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1345
+
1346
+ Parameters
1347
+ ----------
1348
+ type : str, default 'default'
1349
+ Card type.
1350
+ id : str, optional, default None
1351
+ If multiple cards are present, use this id to identify this card.
1352
+ options : Dict[str, Any], default {}
1353
+ Options passed to the card. The contents depend on the card type.
1354
+ timeout : int, default 45
1355
+ Interrupt reporting if it takes more than this many seconds.
1356
+
1357
+
1358
+ """
1359
+ ...
1360
+
1361
+ @typing.overload
1362
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1363
+ ...
1364
+
1365
+ @typing.overload
1366
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1367
+ ...
1368
+
1369
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1370
+ """
1371
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1372
+
1373
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1374
+
1375
+ Parameters
1376
+ ----------
1377
+ type : str, default 'default'
1378
+ Card type.
1379
+ id : str, optional, default None
1380
+ If multiple cards are present, use this id to identify this card.
1381
+ options : Dict[str, Any], default {}
1382
+ Options passed to the card. The contents depend on the card type.
1383
+ timeout : int, default 45
1384
+ Interrupt reporting if it takes more than this many seconds.
1385
+
1386
+
1387
+ """
1388
+ ...
1389
+
1390
+ @typing.overload
1391
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1392
+ """
1393
+ Specifies secrets to be retrieved and injected as environment variables prior to
1394
+ the execution of a step.
1395
+
1396
+ Parameters
1397
+ ----------
1398
+ sources : List[Union[str, Dict[str, Any]]], default: []
1399
+ List of secret specs, defining how the secrets are to be retrieved
1400
+ """
1401
+ ...
1402
+
1403
+ @typing.overload
1404
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1405
+ ...
1406
+
1407
+ @typing.overload
1408
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1409
+ ...
1410
+
1411
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1412
+ """
1413
+ Specifies secrets to be retrieved and injected as environment variables prior to
1414
+ the execution of a step.
1415
+
1416
+ Parameters
1417
+ ----------
1418
+ sources : List[Union[str, Dict[str, Any]]], default: []
1419
+ List of secret specs, defining how the secrets are to be retrieved
1420
+ """
1421
+ ...
1422
+
1423
+ @typing.overload
1424
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1425
+ """
1426
+ Specifies the Conda environment for the step.
1427
+
1428
+ Information in this decorator will augment any
1429
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1430
+ you can use `@conda_base` to set packages required by all
1431
+ steps and use `@conda` to specify step-specific overrides.
1432
+
1433
+ Parameters
1434
+ ----------
1435
+ packages : Dict[str, str], default {}
1436
+ Packages to use for this step. The key is the name of the package
1437
+ and the value is the version to use.
1438
+ libraries : Dict[str, str], default {}
1439
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1440
+ python : str, optional, default None
1441
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1442
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1443
+ disabled : bool, default False
1444
+ If set to True, disables @conda.
1445
+ """
1446
+ ...
1447
+
1448
+ @typing.overload
1449
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1450
+ ...
1451
+
1452
+ @typing.overload
1453
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1454
+ ...
1455
+
1456
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1457
+ """
1458
+ Specifies the Conda environment for the step.
1459
+
1460
+ Information in this decorator will augment any
1461
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1462
+ you can use `@conda_base` to set packages required by all
1463
+ steps and use `@conda` to specify step-specific overrides.
1464
+
1465
+ Parameters
1466
+ ----------
1467
+ packages : Dict[str, str], default {}
1468
+ Packages to use for this step. The key is the name of the package
1469
+ and the value is the version to use.
1470
+ libraries : Dict[str, str], default {}
1471
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1472
+ python : str, optional, default None
1473
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1474
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1475
+ disabled : bool, default False
1476
+ If set to True, disables @conda.
1477
+ """
1478
+ ...
1479
+
1480
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1481
+ """
1482
+ Specifies that this step should execute on Kubernetes.
1483
+
1484
+ Parameters
1485
+ ----------
1486
+ cpu : int, default 1
1487
+ Number of CPUs required for this step. If `@resources` is
1488
+ also present, the maximum value from all decorators is used.
1489
+ memory : int, default 4096
1490
+ Memory size (in MB) required for this step. If
1491
+ `@resources` is also present, the maximum value from all decorators is
1492
+ used.
1493
+ disk : int, default 10240
1494
+ Disk size (in MB) required for this step. If
1495
+ `@resources` is also present, the maximum value from all decorators is
1496
+ used.
1497
+ image : str, optional, default None
1498
+ Docker image to use when launching on Kubernetes. If not specified, and
1499
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1500
+ not, a default Docker image mapping to the current version of Python is used.
1501
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1502
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
1503
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1504
+ Kubernetes service account to use when launching pod in Kubernetes.
1505
+ secrets : List[str], optional, default None
1506
+ Kubernetes secrets to use when launching pod in Kubernetes. These
1507
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1508
+ in Metaflow configuration.
1509
+ node_selector: Union[Dict[str,str], str], optional, default None
1510
+ Kubernetes node selector(s) to apply to the pod running the task.
1511
+ Can be passed in as a comma-separated string of values, e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
1512
+ or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
1513
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1514
+ Kubernetes namespace to use when launching pod in Kubernetes.
1515
+ gpu : int, optional, default None
1516
+ Number of GPUs required for this step. A value of zero implies that
1517
+ the scheduled node should not have GPUs.
1518
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1519
+ The vendor of the GPUs to be used for this step.
1520
+ tolerations : List[str], default []
1521
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1522
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1523
+ use_tmpfs : bool, default False
1524
+ This enables an explicit tmpfs mount for this step.
1525
+ tmpfs_tempdir : bool, default True
1526
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1527
+ tmpfs_size : int, optional, default: None
1528
+ The value for the size (in MiB) of the tmpfs mount for this step.
1529
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1530
+ memory allocated for this step.
1531
+ tmpfs_path : str, optional, default /metaflow_temp
1532
+ Path to tmpfs mount for this step.
1533
+ persistent_volume_claims : Dict[str, str], optional, default None
1534
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1535
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1536
+ shared_memory: int, optional
1537
+ Shared memory size (in MiB) required for this step.
1538
+ port: int, optional
1539
+ Port number to specify in the Kubernetes job object.
1540
+ compute_pool : str, optional, default None
1541
+ Compute pool to be used for this step.
1542
+ If not specified, any accessible compute pool within the perimeter is used.
1543
+ hostname_resolution_timeout: int, default 10 * 60
1544
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
1545
+ Only applicable when @parallel is used.
1606
1546
  """
1607
1547
  ...
1608
1548
 
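As a hedged illustration of the `@kubernetes` parameters documented above, the sketch below requests a few explicit resources; the values and image are assumptions, and anything omitted falls back to the defaults listed in the docstring.

```python
from metaflow import FlowSpec, kubernetes, step


class K8sFlow(FlowSpec):
    # Illustrative resource requests; omitted parameters use the documented defaults.
    @kubernetes(cpu=2, memory=8192, disk=20480, image="python:3.10-slim")
    @step
    def start(self):
        print("This step runs inside a Kubernetes pod.")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    K8sFlow()
```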
@@ -1648,52 +1588,215 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1648
1588
  """
1649
1589
  ...
1650
1590
 
1591
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1592
+ """
1593
+ Specifies what flows belong to the same project.
1594
+
1595
+ A project-specific namespace is created for all flows that
1596
+ use the same `@project(name)`.
1597
+
1598
+ Parameters
1599
+ ----------
1600
+ name : str
1601
+ Project name. Make sure that the name is unique amongst all
1602
+ projects that use the same production scheduler. The name may
1603
+ contain only lowercase alphanumeric characters and underscores.
1604
+
1605
+
1606
+ """
1607
+ ...
1608
+
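A short sketch of the flow-level `@project` decorator described above; the project name is a placeholder chosen only for illustration.

```python
from metaflow import FlowSpec, project, step


@project(name="example_project")  # placeholder; must be unique per production scheduler
class ProjectedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectedFlow()
```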
1651
1609
  @typing.overload
1652
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1610
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1653
1611
  """
1654
- Specifies the times when the flow should be run when running on a
1655
- production scheduler.
1612
+ Specifies the Conda environment for all steps of the flow.
1613
+
1614
+ Use `@conda_base` to set common libraries required by all
1615
+ steps and use `@conda` to specify step-specific additions.
1616
+
1617
+ Parameters
1618
+ ----------
1619
+ packages : Dict[str, str], default {}
1620
+ Packages to use for this flow. The key is the name of the package
1621
+ and the value is the version to use.
1622
+ libraries : Dict[str, str], default {}
1623
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1624
+ python : str, optional, default None
1625
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1626
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1627
+ disabled : bool, default False
1628
+ If set to True, disables Conda.
1629
+ """
1630
+ ...
1631
+
1632
+ @typing.overload
1633
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1634
+ ...
1635
+
1636
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1637
+ """
1638
+ Specifies the Conda environment for all steps of the flow.
1639
+
1640
+ Use `@conda_base` to set common libraries required by all
1641
+ steps and use `@conda` to specify step-specific additions.
1642
+
1643
+ Parameters
1644
+ ----------
1645
+ packages : Dict[str, str], default {}
1646
+ Packages to use for this flow. The key is the name of the package
1647
+ and the value is the version to use.
1648
+ libraries : Dict[str, str], default {}
1649
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1650
+ python : str, optional, default None
1651
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1652
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1653
+ disabled : bool, default False
1654
+ If set to True, disables Conda.
1655
+ """
1656
+ ...
1657
+
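For the flow-level counterpart, a minimal sketch of `@conda_base` pinning one environment for every step; the Python version and package pin are assumptions.

```python
from metaflow import FlowSpec, conda_base, step


@conda_base(python="3.10.4", packages={"numpy": "1.26.4"})  # illustrative pins
class CondaBaseFlow(FlowSpec):
    @step
    def start(self):
        import numpy as np  # resolved from the flow-level Conda environment
        print(np.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaBaseFlow()
```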
1658
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1659
+ """
1660
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1661
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator ensures that the `start` step starts only after all sensors finish.
1662
+
1663
+ Parameters
1664
+ ----------
1665
+ timeout : int
1666
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1667
+ poke_interval : int
1668
+ Time in seconds that the job should wait in between each try. (Default: 60)
1669
+ mode : str
1670
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1671
+ exponential_backoff : bool
1672
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1673
+ pool : str
1674
+ The slot pool this task should run in;
1675
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1676
+ soft_fail : bool
1677
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1678
+ name : str
1679
+ Name of the sensor on Airflow
1680
+ description : str
1681
+ Description of sensor in the Airflow UI
1682
+ external_dag_id : str
1683
+ The dag_id that contains the task you want to wait for.
1684
+ external_task_ids : List[str]
1685
+ The list of task_ids that you want to wait for.
1686
+ If None (default value) the sensor waits for the DAG. (Default: None)
1687
+ allowed_states : List[str]
1688
+ Iterable of allowed states. (Default: ['success'])
1689
+ failed_states : List[str]
1690
+ Iterable of failed or disallowed states. (Default: None)
1691
+ execution_delta : datetime.timedelta
1692
+ Time difference with the previous execution to look at;
1693
+ the default is the same logical date as the current task or DAG. (Default: None)
1694
+ check_existence: bool
1695
+ Set to True to check if the external task exists or check if
1696
+ the DAG to wait for exists. (Default: True)
1697
+ """
1698
+ ...
1699
+
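A heavily hedged sketch of attaching the sensor above to a flow that is compiled with `airflow create`; the DAG and task identifiers are hypothetical, and the remaining arguments are assumed to fall back to the defaults quoted in the docstring.

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step


# Hypothetical upstream DAG/task ids; other arguments are assumed to use the
# documented defaults (timeout=3600, poke_interval=60, mode="poke", ...).
@airflow_external_task_sensor(
    name="wait_for_upstream_etl",
    external_dag_id="upstream_etl_dag",
    external_task_ids=["load_data"],
)
class SensorGatedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorGatedFlow()
```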
1700
+ @typing.overload
1701
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1702
+ """
1703
+ Specifies the flow(s) that this flow depends on.
1704
+
1705
+ ```
1706
+ @trigger_on_finish(flow='FooFlow')
1707
+ ```
1708
+ or
1709
+ ```
1710
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1711
+ ```
1712
+ This decorator respects the @project decorator and triggers the flow
1713
+ when upstream runs within the same namespace complete successfully.
1714
+
1715
+ Additionally, you can specify project-aware upstream flow dependencies
1716
+ by specifying the fully qualified project_flow_name.
1717
+ ```
1718
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1719
+ ```
1720
+ or
1721
+ ```
1722
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1723
+ ```
1724
+
1725
+ You can also specify just the project or project branch (other values will be
1726
+ inferred from the current project or project branch):
1727
+ ```
1728
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1729
+ ```
1730
+
1731
+ Note that `branch` is typically one of:
1732
+ - `prod`
1733
+ - `user.bob`
1734
+ - `test.my_experiment`
1735
+ - `prod.staging`
1656
1736
 
1657
1737
  Parameters
1658
1738
  ----------
1659
- hourly : bool, default False
1660
- Run the workflow hourly.
1661
- daily : bool, default True
1662
- Run the workflow daily.
1663
- weekly : bool, default False
1664
- Run the workflow weekly.
1665
- cron : str, optional, default None
1666
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1667
- specified by this expression.
1668
- timezone : str, optional, default None
1669
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1670
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1739
+ flow : Union[str, Dict[str, str]], optional, default None
1740
+ Upstream flow dependency for this flow.
1741
+ flows : List[Union[str, Dict[str, str]]], default []
1742
+ Upstream flow dependencies for this flow.
1743
+ options : Dict[str, Any], default {}
1744
+ Backend-specific configuration for tuning eventing behavior.
1745
+
1746
+
1671
1747
  """
1672
1748
  ...
1673
1749
 
1674
1750
  @typing.overload
1675
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1751
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1676
1752
  ...
1677
1753
 
1678
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1754
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1679
1755
  """
1680
- Specifies the times when the flow should be run when running on a
1681
- production scheduler.
1756
+ Specifies the flow(s) that this flow depends on.
1757
+
1758
+ ```
1759
+ @trigger_on_finish(flow='FooFlow')
1760
+ ```
1761
+ or
1762
+ ```
1763
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1764
+ ```
1765
+ This decorator respects the @project decorator and triggers the flow
1766
+ when upstream runs within the same namespace complete successfully.
1767
+
1768
+ Additionally, you can specify project-aware upstream flow dependencies
1769
+ by specifying the fully qualified project_flow_name.
1770
+ ```
1771
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1772
+ ```
1773
+ or
1774
+ ```
1775
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1776
+ ```
1777
+
1778
+ You can also specify just the project or project branch (other values will be
1779
+ inferred from the current project or project branch):
1780
+ ```
1781
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1782
+ ```
1783
+
1784
+ Note that `branch` is typically one of:
1785
+ - `prod`
1786
+ - `user.bob`
1787
+ - `test.my_experiment`
1788
+ - `prod.staging`
1682
1789
 
1683
1790
  Parameters
1684
1791
  ----------
1685
- hourly : bool, default False
1686
- Run the workflow hourly.
1687
- daily : bool, default True
1688
- Run the workflow daily.
1689
- weekly : bool, default False
1690
- Run the workflow weekly.
1691
- cron : str, optional, default None
1692
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1693
- specified by this expression.
1694
- timezone : str, optional, default None
1695
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1696
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1792
+ flow : Union[str, Dict[str, str]], optional, default None
1793
+ Upstream flow dependency for this flow.
1794
+ flows : List[Union[str, Dict[str, str]]], default []
1795
+ Upstream flow dependencies for this flow.
1796
+ options : Dict[str, Any], default {}
1797
+ Backend-specific configuration for tuning eventing behavior.
1798
+
1799
+
1697
1800
  """
1698
1801
  ...
1699
1802
 
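To ground the `@trigger_on_finish` docstring above, a minimal sketch that reuses its own `FooFlow` example; the triggering behavior is assumed to apply once the flow is deployed to an event-capable scheduler such as Argo Workflows.

```python
from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow="FooFlow")  # runs after a successful FooFlow run in the same namespace
class BarFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    BarFlow()
```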
@@ -1832,154 +1935,51 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1832
1935
  ...
1833
1936
 
1834
1937
  @typing.overload
1835
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1836
- """
1837
- Specifies the Conda environment for all steps of the flow.
1838
-
1839
- Use `@conda_base` to set common libraries required by all
1840
- steps and use `@conda` to specify step-specific additions.
1841
-
1842
- Parameters
1843
- ----------
1844
- packages : Dict[str, str], default {}
1845
- Packages to use for this flow. The key is the name of the package
1846
- and the value is the version to use.
1847
- libraries : Dict[str, str], default {}
1848
- Supported for backward compatibility. When used with packages, packages will take precedence.
1849
- python : str, optional, default None
1850
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1851
- that the version used will correspond to the version of the Python interpreter used to start the run.
1852
- disabled : bool, default False
1853
- If set to True, disables Conda.
1854
- """
1855
- ...
1856
-
1857
- @typing.overload
1858
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1859
- ...
1860
-
1861
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1862
- """
1863
- Specifies the Conda environment for all steps of the flow.
1864
-
1865
- Use `@conda_base` to set common libraries required by all
1866
- steps and use `@conda` to specify step-specific additions.
1867
-
1868
- Parameters
1869
- ----------
1870
- packages : Dict[str, str], default {}
1871
- Packages to use for this flow. The key is the name of the package
1872
- and the value is the version to use.
1873
- libraries : Dict[str, str], default {}
1874
- Supported for backward compatibility. When used with packages, packages will take precedence.
1875
- python : str, optional, default None
1876
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1877
- that the version used will correspond to the version of the Python interpreter used to start the run.
1878
- disabled : bool, default False
1879
- If set to True, disables Conda.
1880
- """
1881
- ...
1882
-
1883
- @typing.overload
1884
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1938
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1885
1939
  """
1886
- Specifies the flow(s) that this flow depends on.
1887
-
1888
- ```
1889
- @trigger_on_finish(flow='FooFlow')
1890
- ```
1891
- or
1892
- ```
1893
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1894
- ```
1895
- This decorator respects the @project decorator and triggers the flow
1896
- when upstream runs within the same namespace complete successfully
1897
-
1898
- Additionally, you can specify project aware upstream flow dependencies
1899
- by specifying the fully qualified project_flow_name.
1900
- ```
1901
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1902
- ```
1903
- or
1904
- ```
1905
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1906
- ```
1907
-
1908
- You can also specify just the project or project branch (other values will be
1909
- inferred from the current project or project branch):
1910
- ```
1911
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1912
- ```
1913
-
1914
- Note that `branch` is typically one of:
1915
- - `prod`
1916
- - `user.bob`
1917
- - `test.my_experiment`
1918
- - `prod.staging`
1940
+ Specifies the times when the flow should be run when running on a
1941
+ production scheduler.
1919
1942
 
1920
1943
  Parameters
1921
1944
  ----------
1922
- flow : Union[str, Dict[str, str]], optional, default None
1923
- Upstream flow dependency for this flow.
1924
- flows : List[Union[str, Dict[str, str]]], default []
1925
- Upstream flow dependencies for this flow.
1926
- options : Dict[str, Any], default {}
1927
- Backend-specific configuration for tuning eventing behavior.
1928
-
1929
-
1945
+ hourly : bool, default False
1946
+ Run the workflow hourly.
1947
+ daily : bool, default True
1948
+ Run the workflow daily.
1949
+ weekly : bool, default False
1950
+ Run the workflow weekly.
1951
+ cron : str, optional, default None
1952
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1953
+ specified by this expression.
1954
+ timezone : str, optional, default None
1955
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1956
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1930
1957
  """
1931
1958
  ...
1932
1959
 
1933
1960
  @typing.overload
1934
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1961
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1935
1962
  ...
1936
1963
 
1937
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1964
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1938
1965
  """
1939
- Specifies the flow(s) that this flow depends on.
1940
-
1941
- ```
1942
- @trigger_on_finish(flow='FooFlow')
1943
- ```
1944
- or
1945
- ```
1946
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1947
- ```
1948
- This decorator respects the @project decorator and triggers the flow
1949
- when upstream runs within the same namespace complete successfully
1950
-
1951
- Additionally, you can specify project aware upstream flow dependencies
1952
- by specifying the fully qualified project_flow_name.
1953
- ```
1954
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1955
- ```
1956
- or
1957
- ```
1958
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1959
- ```
1960
-
1961
- You can also specify just the project or project branch (other values will be
1962
- inferred from the current project or project branch):
1963
- ```
1964
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1965
- ```
1966
-
1967
- Note that `branch` is typically one of:
1968
- - `prod`
1969
- - `user.bob`
1970
- - `test.my_experiment`
1971
- - `prod.staging`
1966
+ Specifies the times when the flow should be run when running on a
1967
+ production scheduler.
1972
1968
 
1973
1969
  Parameters
1974
1970
  ----------
1975
- flow : Union[str, Dict[str, str]], optional, default None
1976
- Upstream flow dependency for this flow.
1977
- flows : List[Union[str, Dict[str, str]]], default []
1978
- Upstream flow dependencies for this flow.
1979
- options : Dict[str, Any], default {}
1980
- Backend-specific configuration for tuning eventing behavior.
1981
-
1982
-
1971
+ hourly : bool, default False
1972
+ Run the workflow hourly.
1973
+ daily : bool, default True
1974
+ Run the workflow daily.
1975
+ weekly : bool, default False
1976
+ Run the workflow weekly.
1977
+ cron : str, optional, default None
1978
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1979
+ specified by this expression.
1980
+ timezone : str, optional, default None
1981
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1982
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1983
1983
  """
1984
1984
  ...
1985
1985
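Finally, a hedged sketch of the `@schedule` decorator documented in this hunk; the cron expression and timezone are illustrative, and the schedule only takes effect once the flow is deployed to a production scheduler.

```python
from metaflow import FlowSpec, schedule, step


@schedule(cron="0 6 * * *", timezone="America/Los_Angeles")  # illustrative schedule
class NightlyFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```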