ob-metaflow-stubs 6.0.3.180rc0__py2.py3-none-any.whl → 6.0.3.180rc2__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (220)
  1. metaflow-stubs/__init__.pyi +798 -798
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +74 -74
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  92. metaflow-stubs/multicore_utils.pyi +1 -1
  93. metaflow-stubs/ob_internal.pyi +1 -1
  94. metaflow-stubs/parameters.pyi +4 -4
  95. metaflow-stubs/plugins/__init__.pyi +8 -8
  96. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  98. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  99. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  100. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  101. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  102. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  103. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  104. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  105. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  106. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  107. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  109. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  110. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  111. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  113. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  115. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  116. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  119. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  120. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  121. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  122. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  123. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  124. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  125. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  126. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  128. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  129. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  130. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  131. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  132. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  133. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  135. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  138. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  140. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  141. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  142. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  143. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  144. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  145. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  146. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  147. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  148. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  149. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  151. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  152. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  153. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  154. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  155. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  156. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  157. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  158. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  159. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  161. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  162. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  163. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  164. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  165. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  166. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  169. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  170. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  171. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  173. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  174. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  175. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/perimeters.pyi +1 -1
  177. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  179. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/pypi/conda_environment.pyi +1 -1
  181. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  182. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  184. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  185. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  186. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  187. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  188. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +1 -1
  189. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  192. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  193. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  194. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  196. metaflow-stubs/plugins/uv/uv_environment.pyi +1 -1
  197. metaflow-stubs/profilers/__init__.pyi +1 -1
  198. metaflow-stubs/pylint_wrapper.pyi +1 -1
  199. metaflow-stubs/runner/__init__.pyi +1 -1
  200. metaflow-stubs/runner/deployer.pyi +29 -29
  201. metaflow-stubs/runner/deployer_impl.pyi +1 -1
  202. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  203. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  204. metaflow-stubs/runner/nbrun.pyi +1 -1
  205. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  206. metaflow-stubs/runner/utils.pyi +2 -2
  207. metaflow-stubs/system/__init__.pyi +1 -1
  208. metaflow-stubs/system/system_logger.pyi +1 -1
  209. metaflow-stubs/system/system_monitor.pyi +1 -1
  210. metaflow-stubs/tagging_util.pyi +1 -1
  211. metaflow-stubs/tuple_util.pyi +1 -1
  212. metaflow-stubs/user_configs/__init__.pyi +1 -1
  213. metaflow-stubs/user_configs/config_decorators.pyi +4 -4
  214. metaflow-stubs/user_configs/config_options.pyi +3 -3
  215. metaflow-stubs/user_configs/config_parameters.pyi +3 -3
  216. {ob_metaflow_stubs-6.0.3.180rc0.dist-info → ob_metaflow_stubs-6.0.3.180rc2.dist-info}/METADATA +1 -1
  217. ob_metaflow_stubs-6.0.3.180rc2.dist-info/RECORD +220 -0
  218. ob_metaflow_stubs-6.0.3.180rc0.dist-info/RECORD +0 -220
  219. {ob_metaflow_stubs-6.0.3.180rc0.dist-info → ob_metaflow_stubs-6.0.3.180rc2.dist-info}/WHEEL +0 -0
  220. {ob_metaflow_stubs-6.0.3.180rc0.dist-info → ob_metaflow_stubs-6.0.3.180rc2.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi

@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-06-17T08:34:56.575242 #
+ # Generated on 2025-06-17T09:53:19.248370 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)
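The `FlowSpecDerived` type variable and `StepFlag` marker shown above are what every step-decorator overload in this stub is typed against. A minimal sketch of that pattern, with a hypothetical decorator name (not part of this package):

```
import typing

# Mirrors the aliases defined in the stub above.
FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec")
StepFlag = typing.NewType("StepFlag", bool)

# Hypothetical decorator, typed the way the stubbed step decorators are:
# it accepts a step function and returns one with the same signature.
def my_step_decorator(
    f: typing.Callable[[FlowSpecDerived, StepFlag], None]
) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
    return f
```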

@@ -35,18 +35,18 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import events as events
  from . import tuple_util as tuple_util
  from . import cards as cards
+ from . import events as events
  from . import metaflow_git as metaflow_git
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
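The parser re-exports being reordered here (`requirements_txt_parser`, `conda_environment_yml_parser`, `pyproject_toml_parser`) are the helpers used for file-driven dependency configuration. A hedged sketch of the usual wiring, assuming a `requirements.txt` sits next to the flow file; the flow itself is illustrative and not part of this package:

```
from metaflow import FlowSpec, Config, step
from metaflow import requirements_txt_parser  # re-exported at top level, as above

class DepsFlow(FlowSpec):
    # Parses requirements.txt into the structure consumed by @pypi / @pypi_base.
    deps = Config("deps", default="requirements.txt", parser=requirements_txt_parser)

    @step
    def start(self):
        print(self.deps)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DepsFlow()
```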
@@ -155,28 +155,184 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- temp_dir_root : str, optional
- The root directory that will hold the temporary directory where objects will be downloaded.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ """
+ Specifies environment variables to be set prior to the execution of a step.

- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
- The list of repos (models/datasets) to load.

- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step is used to deploy an instance of the app.
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.

- - If repo (model/dataset) is not found in the datastore:
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.

- - If repo is found in the datastore:
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
+ Parameters
+ ----------
+ app_port : int
+ Number of GPUs to use.
+ app_name : str
+ Name of the app to deploy.
+ """
+ ...
+
+ @typing.overload
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Enables loading / saving of models within a step.
+
+
+
+ Parameters
+ ----------
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+ - `current.checkpoint`
+ - `current.model`
+ - `current.huggingface_hub`
+
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
+ """
+ ...
+
+ @typing.overload
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
+ """
+ Enables loading / saving of models within a step.
+
+
+
+ Parameters
+ ----------
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+ - `current.checkpoint`
+ - `current.model`
+ - `current.huggingface_hub`
+
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
+ """
+ ...
+
+ @typing.overload
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Enables checkpointing for a step.
+
+
+
+ Parameters
+ ----------
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
+ """
+ ...
+
+ @typing.overload
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
+ """
+ Enables checkpointing for a step.
+
+
+
+ Parameters
+ ----------
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...
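The new top-level stubs above (`@environment`, `@parallel`, `@app_deploy`, `@model`, `@checkpoint`) mirror decorators provided by the underlying ob-metaflow package. A hedged usage sketch based only on the docstrings shown; the flow, the `model_ref` artifact name, and the values are invented:

```
from metaflow import FlowSpec, step, current, environment, checkpoint, model

class CheckpointedFlow(FlowSpec):

    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @checkpoint(load_policy="fresh")  # per the docstring: reload this task's own checkpoints on retry
    @step
    def start(self):
        # Files written under current.checkpoint.directory are what a retry reloads.
        print("checkpoint dir:", current.checkpoint.directory)
        self.next(self.end)

    # Assumes an earlier step stored a reference in self.model_ref
    # (e.g. via current.model / current.checkpoint, as the docstring describes).
    @model(load="model_ref")
    @step
    def end(self):
        # current.model.loaded maps the loaded artifact name to a local path.
        print(current.model.loaded)

if __name__ == "__main__":
    CheckpointedFlow()
```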

@@ -223,6 +379,61 @@ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy:
  """
  ...

+ def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
+
+ User code call
+ --------------
+ @vllm(
+ model="...",
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+
+ Valid model options
+ -------------------
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
+
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
+ If you need multiple models, you must create multiple @vllm decorators.
+
+
+ Parameters
+ ----------
+ model: str
+ HuggingFace model identifier to be served by vLLM.
+ backend: str
+ Determines where and how to run the vLLM process.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ kwargs : Any
+ Any other keyword arguments are passed directly to the vLLM engine.
+ This allows for flexible configuration of vLLM server settings.
+ For example, `tensor_parallel_size=2`.
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
  @typing.overload
  def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -259,38 +470,83 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Specifies that the step will success under all circumstances.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
+
+
+ Parameters
+ ----------
+ temp_dir_root : str, optional
+ The root directory that will hold the temporary directory where objects will be downloaded.
+
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
+ The list of repos (models/datasets) to load.
+
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
+
+ - If repo (model/dataset) is not found in the datastore:
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
+
+ - If repo is found in the datastore:
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
+ """
+ ...
+
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies the number of times the task corresponding
  to a step needs to be retried.
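`@catch` and `@huggingface_hub` move here with their docstrings unchanged, and the `@retry` docstring that begins above explains how the two failure-handling decorators combine: `@catch` executes a no-op task once retries are exhausted and stores the exception in `var`. A hedged sketch under those docstrings; the flow and the repo id are examples only:

```
from metaflow import FlowSpec, step, current, retry, catch, huggingface_hub

class RobustFlow(FlowSpec):

    @catch(var="start_failure", print_exception=True)
    @retry(times=2, minutes_between_retries=1)
    @step
    def start(self):
        self.next(self.fetch)

    @huggingface_hub(load=["meta-llama/Llama-3.2-1B"])  # example repo id only
    @step
    def fetch(self):
        # Loaded repos are accessible via current.huggingface_hub.loaded (see docstring).
        print(current.huggingface_hub.loaded)
        self.next(self.end)

    @step
    def end(self):
        # If start failed on every attempt, the caught exception lands in this artifact.
        print(getattr(self, "start_failure", None))

if __name__ == "__main__":
    RobustFlow()
```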
@@ -314,73 +570,33 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- This decorator is used to run vllm APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @vllm(
- model="...",
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
-
- Valid model options
- -------------------
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
-
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
- If you need multiple models, you must create multiple @vllm decorators.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Parameters
- ----------
- model: str
- HuggingFace model identifier to be served by vLLM.
- backend: str
- Determines where and how to run the vLLM process.
- debug: bool
- Whether to turn on verbose debugging logs.
- kwargs : Any
- Any other keyword arguments are passed directly to the vLLM engine.
- This allows for flexible configuration of vLLM server settings.
- For example, `tensor_parallel_size=2`.
- """
- ...
-
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on DGX cloud.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
@@ -401,215 +617,87 @@ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[t
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies the resources needed when executing this step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
+ Specifies the resources needed when executing this step.

- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the PyPI packages for the step.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Enables loading / saving of models within a step.
-
-
-
- Parameters
- ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
- - `current.checkpoint`
- - `current.model`
- - `current.huggingface_hub`
-
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
- temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
- """
- ...
-
- @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
- """
- Enables loading / saving of models within a step.
-
-
-
- Parameters
- ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
- - `current.checkpoint`
- - `current.model`
- - `current.huggingface_hub`
-
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
- temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
- """
- ...
-
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step is used to deploy an instance of the app.
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
-
-
- Parameters
- ----------
- app_port : int
- Number of GPUs to use.
- app_name : str
- Name of the app to deploy.
- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
+ Specifies that this step should execute on Kubernetes.


  Parameters
@@ -697,118 +785,53 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
697
785
  ...
698
786
 
699
787
  @typing.overload
700
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
701
- """
702
- Specifies a timeout for your step.
703
-
704
- This decorator is useful if this step may hang indefinitely.
705
-
706
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
707
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
708
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
709
-
710
- Note that all the values specified in parameters are added together so if you specify
711
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
712
-
713
-
714
- Parameters
715
- ----------
716
- seconds : int, default 0
717
- Number of seconds to wait prior to timing out.
718
- minutes : int, default 0
719
- Number of minutes to wait prior to timing out.
720
- hours : int, default 0
721
- Number of hours to wait prior to timing out.
722
- """
723
- ...
724
-
725
- @typing.overload
726
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
727
- ...
728
-
729
- @typing.overload
730
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
731
- ...
732
-
733
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
734
- """
735
- Specifies a timeout for your step.
736
-
737
- This decorator is useful if this step may hang indefinitely.
738
-
739
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
740
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
741
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
742
-
743
- Note that all the values specified in parameters are added together so if you specify
744
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
745
-
746
-
747
- Parameters
748
- ----------
749
- seconds : int, default 0
750
- Number of seconds to wait prior to timing out.
751
- minutes : int, default 0
752
- Number of minutes to wait prior to timing out.
753
- hours : int, default 0
754
- Number of hours to wait prior to timing out.
755
- """
756
- ...
757
-
758
- @typing.overload
759
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
788
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
760
789
  """
761
- Enables checkpointing for a step.
790
+ Specifies the PyPI packages for the step.
762
791
 
792
+ Information in this decorator will augment any
793
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
794
+ you can use `@pypi_base` to set packages required by all
795
+ steps and use `@pypi` to specify step-specific overrides.
763
796
 
764
797
 
765
798
  Parameters
766
799
  ----------
767
- load_policy : str, default: "fresh"
768
- The policy for loading the checkpoint. The following policies are supported:
769
- - "eager": Loads the the latest available checkpoint within the namespace.
770
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
771
- will be loaded at the start of the task.
772
- - "none": Do not load any checkpoint
773
- - "fresh": Loads the lastest checkpoint created within the running Task.
774
- This mode helps loading checkpoints across various retry attempts of the same task.
775
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
776
- created within the task will be loaded when the task is retries execution on failure.
777
-
778
- temp_dir_root : str, default: None
779
- The root directory under which `current.checkpoint.directory` will be created.
800
+ packages : Dict[str, str], default: {}
801
+ Packages to use for this step. The key is the name of the package
802
+ and the value is the version to use.
803
+ python : str, optional, default: None
804
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
805
+ that the version used will correspond to the version of the Python interpreter used to start the run.
780
806
  """
781
807
  ...
782
808
 
783
809
  @typing.overload
784
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
810
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
785
811
  ...
786
812
 
787
813
  @typing.overload
788
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
814
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
789
815
  ...
790
816
 
791
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
817
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
792
818
  """
793
- Enables checkpointing for a step.
819
+ Specifies the PyPI packages for the step.
794
820
 
821
+ Information in this decorator will augment any
822
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
823
+ you can use `@pypi_base` to set packages required by all
824
+ steps and use `@pypi` to specify step-specific overrides.
795
825
 
796
826
 
797
827
  Parameters
798
828
  ----------
799
- load_policy : str, default: "fresh"
800
- The policy for loading the checkpoint. The following policies are supported:
801
- - "eager": Loads the the latest available checkpoint within the namespace.
802
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
803
- will be loaded at the start of the task.
804
- - "none": Do not load any checkpoint
805
- - "fresh": Loads the lastest checkpoint created within the running Task.
806
- This mode helps loading checkpoints across various retry attempts of the same task.
807
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
808
- created within the task will be loaded when the task is retries execution on failure.
809
-
810
- temp_dir_root : str, default: None
811
- The root directory under which `current.checkpoint.directory` will be created.
829
+ packages : Dict[str, str], default: {}
830
+ Packages to use for this step. The key is the name of the package
831
+ and the value is the version to use.
832
+ python : str, optional, default: None
833
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
834
+ that the version used will correspond to the version of the Python interpreter used to start the run.
812
835
  """
813
836
  ...
814
837
 
@@ -862,110 +885,14 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
862
885
  ...
863
886
 
864
887
  @typing.overload
865
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
888
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
866
889
  """
867
- Specifies the resources needed when executing this step.
890
+ Specifies the Conda environment for the step.
868
891
 
869
- Use `@resources` to specify the resource requirements
870
- independently of the specific compute layer (`@batch`, `@kubernetes`).
871
-
872
- You can choose the compute layer on the command line by executing e.g.
873
- ```
874
- python myflow.py run --with batch
875
- ```
876
- or
877
- ```
878
- python myflow.py run --with kubernetes
879
- ```
880
- which executes the flow on the desired system using the
881
- requirements specified in `@resources`.
882
-
883
-
884
- Parameters
885
- ----------
886
- cpu : int, default 1
887
- Number of CPUs required for this step.
888
- gpu : int, optional, default None
889
- Number of GPUs required for this step.
890
- disk : int, optional, default None
891
- Disk size (in MB) required for this step. Only applies on Kubernetes.
892
- memory : int, default 4096
893
- Memory size (in MB) required for this step.
894
- shared_memory : int, optional, default None
895
- The value for the size (in MiB) of the /dev/shm volume for this step.
896
- This parameter maps to the `--shm-size` option in Docker.
897
- """
898
- ...
899
-
900
- @typing.overload
901
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
902
- ...
903
-
904
- @typing.overload
905
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
906
- ...
907
-
908
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
909
- """
910
- Specifies the resources needed when executing this step.
911
-
912
- Use `@resources` to specify the resource requirements
913
- independently of the specific compute layer (`@batch`, `@kubernetes`).
914
-
915
- You can choose the compute layer on the command line by executing e.g.
916
- ```
917
- python myflow.py run --with batch
918
- ```
919
- or
920
- ```
921
- python myflow.py run --with kubernetes
922
- ```
923
- which executes the flow on the desired system using the
924
- requirements specified in `@resources`.
925
-
926
-
927
- Parameters
928
- ----------
929
- cpu : int, default 1
930
- Number of CPUs required for this step.
931
- gpu : int, optional, default None
932
- Number of GPUs required for this step.
933
- disk : int, optional, default None
934
- Disk size (in MB) required for this step. Only applies on Kubernetes.
935
- memory : int, default 4096
936
- Memory size (in MB) required for this step.
937
- shared_memory : int, optional, default None
938
- The value for the size (in MiB) of the /dev/shm volume for this step.
939
- This parameter maps to the `--shm-size` option in Docker.
940
- """
941
- ...
942
-
943
- @typing.overload
944
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
945
- """
946
- Internal decorator to support Fast bakery
947
- """
948
- ...
949
-
950
- @typing.overload
951
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
952
- ...
953
-
954
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
955
- """
956
- Internal decorator to support Fast bakery
957
- """
958
- ...
959
-
960
- @typing.overload
961
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
962
- """
963
- Specifies the Conda environment for the step.
964
-
965
- Information in this decorator will augment any
966
- attributes set in the `@conda_base` flow-level decorator. Hence,
967
- you can use `@conda_base` to set packages required by all
968
- steps and use `@conda` to specify step-specific overrides.
892
+ Information in this decorator will augment any
893
+ attributes set in the `@conda_base` flow-level decorator. Hence,
894
+ you can use `@conda_base` to set packages required by all
895
+ steps and use `@conda` to specify step-specific overrides.
969
896
 
970
897
 
971
898
  Parameters
@@ -1016,145 +943,76 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1016
943
  """
1017
944
  ...
1018
945
 
1019
- @typing.overload
1020
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1021
- """
1022
- Specifies the PyPI packages for all steps of the flow.
1023
-
1024
- Use `@pypi_base` to set common packages required by all
1025
- steps and use `@pypi` to specify step-specific overrides.
1026
-
1027
- Parameters
1028
- ----------
1029
- packages : Dict[str, str], default: {}
1030
- Packages to use for this flow. The key is the name of the package
1031
- and the value is the version to use.
1032
- python : str, optional, default: None
1033
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1034
- that the version used will correspond to the version of the Python interpreter used to start the run.
1035
- """
1036
- ...
1037
-
1038
- @typing.overload
1039
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1040
- ...
1041
-
1042
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
946
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1043
947
  """
1044
- Specifies the PyPI packages for all steps of the flow.
948
+ Specifies that this step should execute on DGX cloud.
1045
949
 
1046
- Use `@pypi_base` to set common packages required by all
1047
- steps and use `@pypi` to specify step-specific overrides.
1048
950
 
1049
951
  Parameters
1050
952
  ----------
1051
- packages : Dict[str, str], default: {}
1052
- Packages to use for this flow. The key is the name of the package
1053
- and the value is the version to use.
1054
- python : str, optional, default: None
1055
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1056
- that the version used will correspond to the version of the Python interpreter used to start the run.
953
+ gpu : int
954
+ Number of GPUs to use.
955
+ gpu_type : str
956
+ Type of Nvidia GPU to use.
1057
957
  """
1058
958
  ...
1059
959
 
1060
960
  @typing.overload
1061
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
961
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1062
962
  """
1063
- Specifies the flow(s) that this flow depends on.
1064
-
1065
- ```
1066
- @trigger_on_finish(flow='FooFlow')
1067
- ```
1068
- or
1069
- ```
1070
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1071
- ```
1072
- This decorator respects the @project decorator and triggers the flow
1073
- when upstream runs within the same namespace complete successfully
963
+ Specifies a timeout for your step.
1074
964
 
1075
- Additionally, you can specify project aware upstream flow dependencies
1076
- by specifying the fully qualified project_flow_name.
1077
- ```
1078
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1079
- ```
1080
- or
1081
- ```
1082
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1083
- ```
965
+ This decorator is useful if this step may hang indefinitely.
1084
966
 
1085
- You can also specify just the project or project branch (other values will be
1086
- inferred from the current project or project branch):
1087
- ```
1088
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1089
- ```
967
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
968
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
969
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1090
970
 
1091
- Note that `branch` is typically one of:
1092
- - `prod`
1093
- - `user.bob`
1094
- - `test.my_experiment`
1095
- - `prod.staging`
971
+ Note that all the values specified in parameters are added together so if you specify
972
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1096
973
 
1097
974
 
1098
975
  Parameters
1099
976
  ----------
1100
- flow : Union[str, Dict[str, str]], optional, default None
1101
- Upstream flow dependency for this flow.
1102
- flows : List[Union[str, Dict[str, str]]], default []
1103
- Upstream flow dependencies for this flow.
1104
- options : Dict[str, Any], default {}
1105
- Backend-specific configuration for tuning eventing behavior.
977
+ seconds : int, default 0
978
+ Number of seconds to wait prior to timing out.
979
+ minutes : int, default 0
980
+ Number of minutes to wait prior to timing out.
981
+ hours : int, default 0
982
+ Number of hours to wait prior to timing out.
1106
983
  """
1107
984
  ...
1108
985
 
1109
986
  @typing.overload
1110
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
987
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1111
988
  ...
1112
989
 
1113
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
990
+ @typing.overload
991
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
992
+ ...
993
+
994
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1114
995
  """
1115
- Specifies the flow(s) that this flow depends on.
1116
-
1117
- ```
1118
- @trigger_on_finish(flow='FooFlow')
1119
- ```
1120
- or
1121
- ```
1122
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1123
- ```
1124
- This decorator respects the @project decorator and triggers the flow
1125
- when upstream runs within the same namespace complete successfully
996
+ Specifies a timeout for your step.
1126
997
 
1127
- Additionally, you can specify project aware upstream flow dependencies
1128
- by specifying the fully qualified project_flow_name.
1129
- ```
1130
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1131
- ```
1132
- or
1133
- ```
1134
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1135
- ```
998
+ This decorator is useful if this step may hang indefinitely.
1136
999
 
1137
- You can also specify just the project or project branch (other values will be
1138
- inferred from the current project or project branch):
1139
- ```
1140
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1141
- ```
1000
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1001
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1002
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1142
1003
 
1143
- Note that `branch` is typically one of:
1144
- - `prod`
1145
- - `user.bob`
1146
- - `test.my_experiment`
1147
- - `prod.staging`
1004
+ Note that all the values specified in parameters are added together so if you specify
1005
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1148
1006
 
1149
1007
 
1150
1008
  Parameters
1151
1009
  ----------
1152
- flow : Union[str, Dict[str, str]], optional, default None
1153
- Upstream flow dependency for this flow.
1154
- flows : List[Union[str, Dict[str, str]]], default []
1155
- Upstream flow dependencies for this flow.
1156
- options : Dict[str, Any], default {}
1157
- Backend-specific configuration for tuning eventing behavior.
1010
+ seconds : int, default 0
1011
+ Number of seconds to wait prior to timing out.
1012
+ minutes : int, default 0
1013
+ Number of minutes to wait prior to timing out.
1014
+ hours : int, default 0
1015
+ Number of hours to wait prior to timing out.
1158
1016
  """
1159
1017
  ...
1160
1018
 
@@ -1210,224 +1068,43 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1210
1068
  ...
1211
1069
 
1212
1070
  @typing.overload
1213
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1071
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1214
1072
  """
1215
- Specifies the event(s) that this flow depends on.
1216
-
1217
- ```
1218
- @trigger(event='foo')
1219
- ```
1220
- or
1221
- ```
1222
- @trigger(events=['foo', 'bar'])
1223
- ```
1224
-
1225
- Additionally, you can specify the parameter mappings
1226
- to map event payload to Metaflow parameters for the flow.
1227
- ```
1228
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1229
- ```
1230
- or
1231
- ```
1232
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1233
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1234
- ```
1235
-
1236
- 'parameters' can also be a list of strings and tuples like so:
1237
- ```
1238
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1239
- ```
1240
- This is equivalent to:
1241
- ```
1242
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1243
- ```
1244
-
1245
-
1246
- Parameters
1247
- ----------
1248
- event : Union[str, Dict[str, Any]], optional, default None
1249
- Event dependency for this flow.
1250
- events : List[Union[str, Dict[str, Any]]], default []
1251
- Events dependency for this flow.
1252
- options : Dict[str, Any], default {}
1253
- Backend-specific configuration for tuning eventing behavior.
1254
- """
1255
- ...
1256
-
1257
- @typing.overload
1258
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1259
- ...
1260
-
1261
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1262
- """
1263
- Specifies the event(s) that this flow depends on.
1264
-
1265
- ```
1266
- @trigger(event='foo')
1267
- ```
1268
- or
1269
- ```
1270
- @trigger(events=['foo', 'bar'])
1271
- ```
1272
-
1273
- Additionally, you can specify the parameter mappings
1274
- to map event payload to Metaflow parameters for the flow.
1275
- ```
1276
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1277
- ```
1278
- or
1279
- ```
1280
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1281
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1282
- ```
1283
-
1284
- 'parameters' can also be a list of strings and tuples like so:
1285
- ```
1286
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1287
- ```
1288
- This is equivalent to:
1289
- ```
1290
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1291
- ```
1292
-
1293
-
1294
- Parameters
1295
- ----------
1296
- event : Union[str, Dict[str, Any]], optional, default None
1297
- Event dependency for this flow.
1298
- events : List[Union[str, Dict[str, Any]]], default []
1299
- Events dependency for this flow.
1300
- options : Dict[str, Any], default {}
1301
- Backend-specific configuration for tuning eventing behavior.
1302
- """
1303
- ...
1304
-
1305
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1306
- """
1307
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1308
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1309
-
1310
-
1311
- Parameters
1312
- ----------
1313
- timeout : int
1314
- Time, in seconds before the task times out and fails. (Default: 3600)
1315
- poke_interval : int
1316
- Time in seconds that the job should wait in between each try. (Default: 60)
1317
- mode : str
1318
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1319
- exponential_backoff : bool
1320
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1321
- pool : str
1322
- the slot pool this task should run in,
1323
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1324
- soft_fail : bool
1325
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1326
- name : str
1327
- Name of the sensor on Airflow
1328
- description : str
1329
- Description of sensor in the Airflow UI
1330
- external_dag_id : str
1331
- The dag_id that contains the task you want to wait for.
1332
- external_task_ids : List[str]
1333
- The list of task_ids that you want to wait for.
1334
- If None (default value) the sensor waits for the DAG. (Default: None)
1335
- allowed_states : List[str]
1336
- Iterable of allowed states, (Default: ['success'])
1337
- failed_states : List[str]
1338
- Iterable of failed or dis-allowed states. (Default: None)
1339
- execution_delta : datetime.timedelta
1340
- time difference with the previous execution to look at,
1341
- the default is the same logical date as the current task or DAG. (Default: None)
1342
- check_existence: bool
1343
- Set to True to check if the external task exists or check if
1344
- the DAG to wait for exists. (Default: True)
1345
- """
1346
- ...
1347
-
1348
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1349
- """
1350
- Specifies what flows belong to the same project.
1351
-
1352
- A project-specific namespace is created for all flows that
1353
- use the same `@project(name)`.
1354
-
1355
-
1356
- Parameters
1357
- ----------
1358
- name : str
1359
- Project name. Make sure that the name is unique amongst all
1360
- projects that use the same production scheduler. The name may
1361
- contain only lowercase alphanumeric characters and underscores.
1362
-
1363
- branch : Optional[str], default None
1364
- The branch to use. If not specified, the branch is set to
1365
- `user.<username>` unless `production` is set to `True`. This can
1366
- also be set on the command line using `--branch` as a top-level option.
1367
- It is an error to specify `branch` in the decorator and on the command line.
1368
-
1369
- production : bool, default False
1370
- Whether or not the branch is the production branch. This can also be set on the
1371
- command line using `--production` as a top-level option. It is an error to specify
1372
- `production` in the decorator and on the command line.
1373
- The project branch name will be:
1374
- - if `branch` is specified:
1375
- - if `production` is True: `prod.<branch>`
1376
- - if `production` is False: `test.<branch>`
1377
- - if `branch` is not specified:
1378
- - if `production` is True: `prod`
1379
- - if `production` is False: `user.<username>`
1380
- """
1381
- ...
1382
-
1383
- @typing.overload
1384
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1385
- """
1386
- Specifies the times when the flow should be run when running on a
1387
- production scheduler.
1073
+ Specifies the PyPI packages for all steps of the flow.
1388
1074
 
1075
+ Use `@pypi_base` to set common packages required by all
1076
+ steps and use `@pypi` to specify step-specific overrides.
1389
1077
 
1390
1078
  Parameters
1391
1079
  ----------
1392
- hourly : bool, default False
1393
- Run the workflow hourly.
1394
- daily : bool, default True
1395
- Run the workflow daily.
1396
- weekly : bool, default False
1397
- Run the workflow weekly.
1398
- cron : str, optional, default None
1399
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1400
- specified by this expression.
1401
- timezone : str, optional, default None
1402
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1403
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1080
+ packages : Dict[str, str], default: {}
1081
+ Packages to use for this flow. The key is the name of the package
1082
+ and the value is the version to use.
1083
+ python : str, optional, default: None
1084
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1085
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1404
1086
  """
1405
1087
  ...
1406
1088
 
1407
1089
  @typing.overload
1408
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1090
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1409
1091
  ...
1410
1092
 
1411
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1093
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1412
1094
  """
1413
- Specifies the times when the flow should be run when running on a
1414
- production scheduler.
1095
+ Specifies the PyPI packages for all steps of the flow.
1415
1096
 
1097
+ Use `@pypi_base` to set common packages required by all
1098
+ steps and use `@pypi` to specify step-specific overrides.
1416
1099
 
1417
1100
  Parameters
1418
1101
  ----------
1419
- hourly : bool, default False
1420
- Run the workflow hourly.
1421
- daily : bool, default True
1422
- Run the workflow daily.
1423
- weekly : bool, default False
1424
- Run the workflow weekly.
1425
- cron : str, optional, default None
1426
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1427
- specified by this expression.
1428
- timezone : str, optional, default None
1429
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1430
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1102
+ packages : Dict[str, str], default: {}
1103
+ Packages to use for this flow. The key is the name of the package
1104
+ and the value is the version to use.
1105
+ python : str, optional, default: None
1106
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1107
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1431
1108
  """
1432
1109
  ...
1433
1110
 
@@ -1545,33 +1222,177 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1545
1222
  """
1546
1223
  ...
1547
1224
 
1548
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1225
+ @typing.overload
1226
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1549
1227
  """
1550
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1551
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1552
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1553
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1554
- starts only after all sensors finish.
1228
+ Specifies the event(s) that this flow depends on.
1229
+
1230
+ ```
1231
+ @trigger(event='foo')
1232
+ ```
1233
+ or
1234
+ ```
1235
+ @trigger(events=['foo', 'bar'])
1236
+ ```
1237
+
1238
+ Additionally, you can specify the parameter mappings
1239
+ to map event payload to Metaflow parameters for the flow.
1240
+ ```
1241
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1242
+ ```
1243
+ or
1244
+ ```
1245
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1246
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1247
+ ```
1248
+
1249
+ 'parameters' can also be a list of strings and tuples like so:
1250
+ ```
1251
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1252
+ ```
1253
+ This is equivalent to:
1254
+ ```
1255
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1256
+ ```
1555
1257
 
1556
1258
 
1557
1259
  Parameters
1558
1260
  ----------
1559
- timeout : int
1560
- Time, in seconds before the task times out and fails. (Default: 3600)
1561
- poke_interval : int
1562
- Time in seconds that the job should wait in between each try. (Default: 60)
1563
- mode : str
1564
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1565
- exponential_backoff : bool
1566
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1567
- pool : str
1568
- the slot pool this task should run in,
1569
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1570
- soft_fail : bool
1571
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1572
- name : str
1573
- Name of the sensor on Airflow
1574
- description : str
1261
+ event : Union[str, Dict[str, Any]], optional, default None
1262
+ Event dependency for this flow.
1263
+ events : List[Union[str, Dict[str, Any]]], default []
1264
+ Events dependency for this flow.
1265
+ options : Dict[str, Any], default {}
1266
+ Backend-specific configuration for tuning eventing behavior.
1267
+ """
1268
+ ...
1269
+
1270
+ @typing.overload
1271
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1272
+ ...
1273
+
1274
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1275
+ """
1276
+ Specifies the event(s) that this flow depends on.
1277
+
1278
+ ```
1279
+ @trigger(event='foo')
1280
+ ```
1281
+ or
1282
+ ```
1283
+ @trigger(events=['foo', 'bar'])
1284
+ ```
1285
+
1286
+ Additionally, you can specify the parameter mappings
1287
+ to map event payload to Metaflow parameters for the flow.
1288
+ ```
1289
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1290
+ ```
1291
+ or
1292
+ ```
1293
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1294
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1295
+ ```
1296
+
1297
+ 'parameters' can also be a list of strings and tuples like so:
1298
+ ```
1299
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1300
+ ```
1301
+ This is equivalent to:
1302
+ ```
1303
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1304
+ ```
1305
+
1306
+
1307
+ Parameters
1308
+ ----------
1309
+ event : Union[str, Dict[str, Any]], optional, default None
1310
+ Event dependency for this flow.
1311
+ events : List[Union[str, Dict[str, Any]]], default []
1312
+ Events dependency for this flow.
1313
+ options : Dict[str, Any], default {}
1314
+ Backend-specific configuration for tuning eventing behavior.
1315
+ """
1316
+ ...
1317
+
1318
+ @typing.overload
1319
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1320
+ """
1321
+ Specifies the times when the flow should be run when running on a
1322
+ production scheduler.
1323
+
1324
+
1325
+ Parameters
1326
+ ----------
1327
+ hourly : bool, default False
1328
+ Run the workflow hourly.
1329
+ daily : bool, default True
1330
+ Run the workflow daily.
1331
+ weekly : bool, default False
1332
+ Run the workflow weekly.
1333
+ cron : str, optional, default None
1334
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1335
+ specified by this expression.
1336
+ timezone : str, optional, default None
1337
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1338
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1339
+ """
1340
+ ...
1341
+
1342
+ @typing.overload
1343
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1344
+ ...
1345
+
1346
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1347
+ """
1348
+ Specifies the times when the flow should be run when running on a
1349
+ production scheduler.
1350
+
1351
+
1352
+ Parameters
1353
+ ----------
1354
+ hourly : bool, default False
1355
+ Run the workflow hourly.
1356
+ daily : bool, default True
1357
+ Run the workflow daily.
1358
+ weekly : bool, default False
1359
+ Run the workflow weekly.
1360
+ cron : str, optional, default None
1361
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1362
+ specified by this expression.
1363
+ timezone : str, optional, default None
1364
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1365
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1366
+ """
1367
+ ...
1368
+
1369
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1370
+ """
1371
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1372
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1373
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1374
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1375
+ starts only after all sensors finish.
1376
+
1377
+
1378
+ Parameters
1379
+ ----------
1380
+ timeout : int
1381
+ Time, in seconds before the task times out and fails. (Default: 3600)
1382
+ poke_interval : int
1383
+ Time in seconds that the job should wait in between each try. (Default: 60)
1384
+ mode : str
1385
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1386
+ exponential_backoff : bool
1387
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1388
+ pool : str
1389
+ the slot pool this task should run in,
1390
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1391
+ soft_fail : bool
1392
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1393
+ name : str
1394
+ Name of the sensor on Airflow
1395
+ description : str
1575
1396
  Description of sensor in the Airflow UI
1576
1397
  bucket_key : Union[str, List[str]]
1577
1398
  The key(s) being waited on. Supports full s3:// style url or relative path from root level.
@@ -1588,5 +1409,184 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1588
1409
  """
1589
1410
  ...
1590
1411
 
1412
+ @typing.overload
1413
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1414
+ """
1415
+ Specifies the flow(s) that this flow depends on.
1416
+
1417
+ ```
1418
+ @trigger_on_finish(flow='FooFlow')
1419
+ ```
1420
+ or
1421
+ ```
1422
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1423
+ ```
1424
+ This decorator respects the @project decorator and triggers the flow
1425
+ when upstream runs within the same namespace complete successfully
1426
+
1427
+ Additionally, you can specify project aware upstream flow dependencies
1428
+ by specifying the fully qualified project_flow_name.
1429
+ ```
1430
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1431
+ ```
1432
+ or
1433
+ ```
1434
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1435
+ ```
1436
+
1437
+ You can also specify just the project or project branch (other values will be
1438
+ inferred from the current project or project branch):
1439
+ ```
1440
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1441
+ ```
1442
+
1443
+ Note that `branch` is typically one of:
1444
+ - `prod`
1445
+ - `user.bob`
1446
+ - `test.my_experiment`
1447
+ - `prod.staging`
1448
+
1449
+
1450
+ Parameters
1451
+ ----------
1452
+ flow : Union[str, Dict[str, str]], optional, default None
1453
+ Upstream flow dependency for this flow.
1454
+ flows : List[Union[str, Dict[str, str]]], default []
1455
+ Upstream flow dependencies for this flow.
1456
+ options : Dict[str, Any], default {}
1457
+ Backend-specific configuration for tuning eventing behavior.
1458
+ """
1459
+ ...
1460
+
1461
+ @typing.overload
1462
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1463
+ ...
1464
+
1465
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1466
+ """
1467
+ Specifies the flow(s) that this flow depends on.
1468
+
1469
+ ```
1470
+ @trigger_on_finish(flow='FooFlow')
1471
+ ```
1472
+ or
1473
+ ```
1474
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1475
+ ```
1476
+ This decorator respects the @project decorator and triggers the flow
1477
+ when upstream runs within the same namespace complete successfully
1478
+
1479
+ Additionally, you can specify project aware upstream flow dependencies
1480
+ by specifying the fully qualified project_flow_name.
1481
+ ```
1482
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1483
+ ```
1484
+ or
1485
+ ```
1486
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1487
+ ```
1488
+
1489
+ You can also specify just the project or project branch (other values will be
1490
+ inferred from the current project or project branch):
1491
+ ```
1492
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1493
+ ```
1494
+
1495
+ Note that `branch` is typically one of:
1496
+ - `prod`
1497
+ - `user.bob`
1498
+ - `test.my_experiment`
1499
+ - `prod.staging`
1500
+
1501
+
1502
+ Parameters
1503
+ ----------
1504
+ flow : Union[str, Dict[str, str]], optional, default None
1505
+ Upstream flow dependency for this flow.
1506
+ flows : List[Union[str, Dict[str, str]]], default []
1507
+ Upstream flow dependencies for this flow.
1508
+ options : Dict[str, Any], default {}
1509
+ Backend-specific configuration for tuning eventing behavior.
1510
+ """
1511
+ ...
1512
+
1513
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1514
+ """
1515
+ Specifies what flows belong to the same project.
1516
+
1517
+ A project-specific namespace is created for all flows that
1518
+ use the same `@project(name)`.
1519
+
1520
+
1521
+ Parameters
1522
+ ----------
1523
+ name : str
1524
+ Project name. Make sure that the name is unique amongst all
1525
+ projects that use the same production scheduler. The name may
1526
+ contain only lowercase alphanumeric characters and underscores.
1527
+
1528
+ branch : Optional[str], default None
1529
+ The branch to use. If not specified, the branch is set to
1530
+ `user.<username>` unless `production` is set to `True`. This can
1531
+ also be set on the command line using `--branch` as a top-level option.
1532
+ It is an error to specify `branch` in the decorator and on the command line.
1533
+
1534
+ production : bool, default False
1535
+ Whether or not the branch is the production branch. This can also be set on the
1536
+ command line using `--production` as a top-level option. It is an error to specify
1537
+ `production` in the decorator and on the command line.
1538
+ The project branch name will be:
1539
+ - if `branch` is specified:
1540
+ - if `production` is True: `prod.<branch>`
1541
+ - if `production` is False: `test.<branch>`
1542
+ - if `branch` is not specified:
1543
+ - if `production` is True: `prod`
1544
+ - if `production` is False: `user.<username>`
1545
+ """
1546
+ ...
1547
+
1548
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1549
+ """
1550
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1551
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1552
+
1553
+
1554
+ Parameters
1555
+ ----------
1556
+ timeout : int
1557
+ Time, in seconds before the task times out and fails. (Default: 3600)
1558
+ poke_interval : int
1559
+ Time in seconds that the job should wait in between each try. (Default: 60)
1560
+ mode : str
1561
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1562
+ exponential_backoff : bool
1563
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1564
+ pool : str
1565
+ the slot pool this task should run in,
1566
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1567
+ soft_fail : bool
1568
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1569
+ name : str
1570
+ Name of the sensor on Airflow
1571
+ description : str
1572
+ Description of sensor in the Airflow UI
1573
+ external_dag_id : str
1574
+ The dag_id that contains the task you want to wait for.
1575
+ external_task_ids : List[str]
1576
+ The list of task_ids that you want to wait for.
1577
+ If None (default value) the sensor waits for the DAG. (Default: None)
1578
+ allowed_states : List[str]
1579
+ Iterable of allowed states, (Default: ['success'])
1580
+ failed_states : List[str]
1581
+ Iterable of failed or dis-allowed states. (Default: None)
1582
+ execution_delta : datetime.timedelta
1583
+ time difference with the previous execution to look at,
1584
+ the default is the same logical date as the current task or DAG. (Default: None)
1585
+ check_existence: bool
1586
+ Set to True to check if the external task exists or check if
1587
+ the DAG to wait for exists. (Default: True)
1588
+ """
1589
+ ...
1590
+
1591
1591
  pkg_name: str
1592
1592