ob-metaflow-stubs 6.0.3.182rc2__py2.py3-none-any.whl → 6.0.3.183rc0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (220)
  1. metaflow-stubs/__init__.pyi +628 -628
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -4
  20. metaflow-stubs/metaflow_current.pyi +106 -106
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +3 -3
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +3 -3
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +3 -3
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  89. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  92. metaflow-stubs/multicore_utils.pyi +2 -2
  93. metaflow-stubs/ob_internal.pyi +2 -2
  94. metaflow-stubs/parameters.pyi +3 -3
  95. metaflow-stubs/plugins/__init__.pyi +8 -8
  96. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  97. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  98. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  99. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  101. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  102. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  103. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  105. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  106. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  107. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  108. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  109. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  110. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  111. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  112. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  113. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  114. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  115. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  116. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  118. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  119. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  120. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  121. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  123. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  124. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  125. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  126. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  128. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  129. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  130. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  131. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  132. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  133. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  134. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  135. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  136. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -4
  137. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  138. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  139. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  140. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  141. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  142. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  143. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  144. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  145. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  146. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  147. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  148. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  149. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  150. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  151. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  152. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  153. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  154. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  155. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  156. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  157. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  158. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  159. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  160. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  161. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  162. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  163. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  164. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  165. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  166. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  167. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  168. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  169. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  170. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  171. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  172. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  173. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  174. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  175. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  176. metaflow-stubs/plugins/perimeters.pyi +2 -2
  177. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  178. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  179. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  180. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  181. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  182. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  183. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  184. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  185. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  186. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  187. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  188. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  189. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  190. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  191. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  192. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  193. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  194. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  195. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  196. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  197. metaflow-stubs/profilers/__init__.pyi +2 -2
  198. metaflow-stubs/pylint_wrapper.pyi +2 -2
  199. metaflow-stubs/runner/__init__.pyi +2 -2
  200. metaflow-stubs/runner/deployer.pyi +30 -30
  201. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  202. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  203. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  204. metaflow-stubs/runner/nbrun.pyi +2 -2
  205. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  206. metaflow-stubs/runner/utils.pyi +3 -3
  207. metaflow-stubs/system/__init__.pyi +2 -2
  208. metaflow-stubs/system/system_logger.pyi +3 -3
  209. metaflow-stubs/system/system_monitor.pyi +2 -2
  210. metaflow-stubs/tagging_util.pyi +2 -2
  211. metaflow-stubs/tuple_util.pyi +2 -2
  212. metaflow-stubs/user_configs/__init__.pyi +2 -2
  213. metaflow-stubs/user_configs/config_decorators.pyi +6 -6
  214. metaflow-stubs/user_configs/config_options.pyi +2 -2
  215. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  216. {ob_metaflow_stubs-6.0.3.182rc2.dist-info → ob_metaflow_stubs-6.0.3.183rc0.dist-info}/METADATA +1 -1
  217. ob_metaflow_stubs-6.0.3.183rc0.dist-info/RECORD +220 -0
  218. ob_metaflow_stubs-6.0.3.182rc2.dist-info/RECORD +0 -220
  219. {ob_metaflow_stubs-6.0.3.182rc2.dist-info → ob_metaflow_stubs-6.0.3.183rc0.dist-info}/WHEEL +0 -0
  220. {ob_metaflow_stubs-6.0.3.182rc2.dist-info → ob_metaflow_stubs-6.0.3.183rc0.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-06-18T10:30:41.738497 #
+ # MF version: 2.15.18.1+obcheckpoint(0.2.1);ob(v1) #
+ # Generated on 2025-06-20T20:53:54.419355 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing

  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -37,16 +37,16 @@ from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDec
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
  from . import tuple_util as tuple_util
  from . import cards as cards
- from . import events as events
  from . import metaflow_git as metaflow_git
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
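For context, the `requirements_txt_parser` re-export reordered above is typically wired into a `Config` so that a requirements file can drive dependency settings. A minimal sketch of that pattern, not part of this diff; the flow name and requirements file are illustrative:

    from metaflow import FlowSpec, step, Config, requirements_txt_parser

    class DepsFlow(FlowSpec):
        # Parse a requirements.txt into a Config object; the parsed values can then
        # feed @pypi_base / @pypi. The default file name here is an assumption.
        deps = Config("deps", default="requirements.txt", parser=requirements_txt_parser)

        @step
        def start(self):
            print(self.deps)
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        DepsFlow()
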
@@ -155,37 +155,55 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies that this step is used to deploy an instance of the app.
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- app_port : int
- Number of GPUs to use.
- app_name : str
- Name of the app to deploy.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...
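The `@secrets` overloads added in this hunk take a list of secret sources. A minimal usage sketch; the secret source name and environment variable are placeholders, not taken from this diff:

    import os
    from metaflow import FlowSpec, step, secrets

    class SecretsFlow(FlowSpec):
        @secrets(sources=["my-secret-source"])  # placeholder secret spec
        @step
        def start(self):
            # Matching secrets are injected as environment variables before the step body runs.
            print("MY_TOKEN present:", "MY_TOKEN" in os.environ)
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        SecretsFlow()
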
@@ -246,118 +264,6 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on DGX cloud.
-
-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
- """
- ...
-
- @typing.overload
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Enables checkpointing for a step.
-
-
-
- Parameters
- ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
- """
- ...
-
- @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
- """
- Enables checkpointing for a step.
-
-
-
- Parameters
- ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
- """
- ...
-
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on DGX cloud.
-
-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- """
- ...
-
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
-
-
- Parameters
- ----------
- temp_dir_root : str, optional
- The root directory that will hold the temporary directory where objects will be downloaded.
-
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
- The list of repos (models/datasets) to load.
-
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
-
- - If repo (model/dataset) is not found in the datastore:
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
-
- - If repo is found in the datastore:
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
- """
- ...
-
  @typing.overload
  def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -507,100 +413,85 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @ollama(
- models=[...],
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
-
- Valid model options
- -------------------
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
+ Specifies that this step is used to deploy an instance of the app.
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.


  Parameters
  ----------
- models: list[str]
- List of Ollama containers running models in sidecars.
- backend: str
- Determines where and how to run the Ollama process.
- force_pull: bool
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
- cache_update_policy: str
- Cache update policy: "auto", "force", or "never".
- force_cache_update: bool
- Simple override for "force" cache update policy.
- debug: bool
- Whether to turn on verbose debugging logs.
- circuit_breaker_config: dict
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
- timeout_config: dict
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
+ app_port : int
+ Number of GPUs to use.
+ app_name : str
+ Name of the app to deploy.
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...
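The relocated `@card` stub above documents its parameters. A minimal usage sketch; the flow and artifact names are illustrative, not part of this diff:

    from metaflow import FlowSpec, step, card

    class CardFlow(FlowSpec):
        @card(type="default", timeout=60)
        @step
        def start(self):
            # Artifacts set here are picked up by the default card rendered after the step.
            self.message = "hello"
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        CardFlow()
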
@@ -684,187 +575,116 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies the Conda environment for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
+ Specifies the Conda environment for the step.

- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- """
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- """
- ...
-
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Internal decorator to support Fast bakery
- """
- ...
-
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Internal decorator to support Fast bakery
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
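The hunk above moves the `@conda` and `@retry` stubs into this region; their documented parameters combine naturally on a single step. A minimal sketch, with illustrative package and Python versions that are not part of this diff:

    from metaflow import FlowSpec, step, conda, retry

    class RobustFlow(FlowSpec):
        @retry(times=2, minutes_between_retries=1)
        @conda(packages={"pandas": "2.2.2"}, python="3.11.9")  # illustrative versions
        @step
        def start(self):
            import pandas as pd
            # Transient failures in this step are retried up to 2 times, per @retry above.
            self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        RobustFlow()
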
@@ -907,231 +727,428 @@ def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing
907
727
  ...
908
728
 
909
729
  @typing.overload
910
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
730
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
911
731
  """
912
- Specifies the Conda environment for the step.
732
+ Specifies the PyPI packages for the step.
913
733
 
914
734
  Information in this decorator will augment any
915
- attributes set in the `@conda_base` flow-level decorator. Hence,
916
- you can use `@conda_base` to set packages required by all
917
- steps and use `@conda` to specify step-specific overrides.
735
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
736
+ you can use `@pypi_base` to set packages required by all
737
+ steps and use `@pypi` to specify step-specific overrides.
918
738
 
919
739
 
920
740
  Parameters
921
741
  ----------
922
- packages : Dict[str, str], default {}
742
+ packages : Dict[str, str], default: {}
923
743
  Packages to use for this step. The key is the name of the package
924
744
  and the value is the version to use.
925
- libraries : Dict[str, str], default {}
926
- Supported for backward compatibility. When used with packages, packages will take precedence.
927
- python : str, optional, default None
745
+ python : str, optional, default: None
928
746
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
929
747
  that the version used will correspond to the version of the Python interpreter used to start the run.
930
- disabled : bool, default False
931
- If set to True, disables @conda.
932
748
  """
933
749
  ...
934
750
 
935
751
  @typing.overload
936
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
752
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
937
753
  ...
938
754
 
939
755
  @typing.overload
940
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
756
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
941
757
  ...
942
758
 
943
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
759
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
944
760
  """
945
- Specifies the Conda environment for the step.
761
+ Specifies the PyPI packages for the step.
946
762
 
947
763
  Information in this decorator will augment any
948
- attributes set in the `@conda_base` flow-level decorator. Hence,
949
- you can use `@conda_base` to set packages required by all
950
- steps and use `@conda` to specify step-specific overrides.
764
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
765
+ you can use `@pypi_base` to set packages required by all
766
+ steps and use `@pypi` to specify step-specific overrides.
951
767
 
952
768
 
953
769
  Parameters
954
770
  ----------
955
- packages : Dict[str, str], default {}
771
+ packages : Dict[str, str], default: {}
956
772
  Packages to use for this step. The key is the name of the package
957
773
  and the value is the version to use.
958
- libraries : Dict[str, str], default {}
959
- Supported for backward compatibility. When used with packages, packages will take precedence.
960
- python : str, optional, default None
774
+ python : str, optional, default: None
961
775
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
962
776
  that the version used will correspond to the version of the Python interpreter used to start the run.
963
- disabled : bool, default False
964
- If set to True, disables @conda.
777
+ """
778
+ ...
779
+
780
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
781
+ """
782
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
783
+
784
+
785
+ Parameters
786
+ ----------
787
+ temp_dir_root : str, optional
788
+ The root directory that will hold the temporary directory where objects will be downloaded.
789
+
790
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
791
+ The list of repos (models/datasets) to load.
792
+
793
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
794
+
795
+ - If repo (model/dataset) is not found in the datastore:
796
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
797
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
798
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
799
+
800
+ - If repo is found in the datastore:
801
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
802
+ """
803
+ ...
804
+
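A hedged sketch of `@huggingface_hub` as documented above; it assumes the decorator is importable from the top-level `metaflow` namespace (as these stubs suggest), and the repo id is only an example. Loaded repos are read back through `current.huggingface_hub.loaded`, the accessor named in the docstring.

```python
from metaflow import FlowSpec, step, current, huggingface_hub


class HuggingfaceDemoFlow(FlowSpec):  # hypothetical flow name

    @huggingface_hub(load=["bert-base-uncased"])  # illustrative repo id
    @step
    def start(self):
        # Per the docstring, the local path of each loaded repo is exposed here.
        local_path = current.huggingface_hub.loaded["bert-base-uncased"]
        print("repo cached at", local_path)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    HuggingfaceDemoFlow()
```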
805
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
806
+ """
807
+ Specifies that this step should execute on DGX cloud.
808
+
809
+
810
+ Parameters
811
+ ----------
812
+ gpu : int
813
+ Number of GPUs to use.
814
+ gpu_type : str
815
+ Type of Nvidia GPU to use.
816
+ """
817
+ ...
818
+
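A minimal sketch of `@nvct`; the GPU count and type are placeholders rather than validated values.

```python
from metaflow import FlowSpec, step, nvct


class NvctDemoFlow(FlowSpec):  # hypothetical flow name

    @nvct(gpu=1, gpu_type="H100")  # placeholder GPU settings
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NvctDemoFlow()
```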
819
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
820
+ """
821
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
822
+
823
+ User code call
824
+ --------------
825
+ @ollama(
826
+ models=[...],
827
+ ...
828
+ )
829
+
830
+ Valid backend options
831
+ ---------------------
832
+ - 'local': Run as a separate process on the local task machine.
833
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
834
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
835
+
836
+ Valid model options
837
+ -------------------
838
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
839
+
840
+
841
+ Parameters
842
+ ----------
843
+ models: list[str]
844
+ List of Ollama containers running models in sidecars.
845
+ backend: str
846
+ Determines where and how to run the Ollama process.
847
+ force_pull: bool
848
+ Whether to always run `ollama pull`, or to first check the remote cache in the Metaflow datastore for this model key.
849
+ cache_update_policy: str
850
+ Cache update policy: "auto", "force", or "never".
851
+ force_cache_update: bool
852
+ Simple override for "force" cache update policy.
853
+ debug: bool
854
+ Whether to turn on verbose debugging logs.
855
+ circuit_breaker_config: dict
856
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
857
+ timeout_config: dict
858
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
859
+ """
860
+ ...
861
+
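A hedged sketch of the `@ollama` sidecar decorator; 'llama3.2' comes from the docstring's model examples and `backend='local'` is the only backend listed as implemented. The remaining parameters are assumed to fall back to library defaults that this stub rendering does not show.

```python
from metaflow import FlowSpec, step, ollama


class OllamaDemoFlow(FlowSpec):  # hypothetical flow name

    # Assumption: parameters not passed here keep their library defaults.
    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        # The sidecar serves the pulled model(s); step code would call the local
        # Ollama API here (client code omitted in this sketch).
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaDemoFlow()
```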
862
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
863
+ """
864
+ Specifies that this step should execute on DGX cloud.
865
+
866
+
867
+ Parameters
868
+ ----------
869
+ gpu : int
870
+ Number of GPUs to use.
871
+ gpu_type : str
872
+ Type of Nvidia GPU to use.
873
+ queue_timeout : int
874
+ Time to keep the job in NVCF's queue.
965
875
  """
966
876
  ...
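`@nvidia` mirrors `@nvct` with an extra `queue_timeout`; a short placeholder sketch:

```python
from metaflow import FlowSpec, step, nvidia


class NvidiaDemoFlow(FlowSpec):  # hypothetical flow name

    @nvidia(gpu=1, gpu_type="A100", queue_timeout=3600)  # placeholder values
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NvidiaDemoFlow()
```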
967
877
 
968
878
  @typing.overload
969
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
879
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
970
880
  """
971
- Specifies the PyPI packages for the step.
881
+ Enables checkpointing for a step.
972
882
 
973
- Information in this decorator will augment any
974
- attributes set in the `@pyi_base` flow-level decorator. Hence,
975
- you can use `@pypi_base` to set packages required by all
976
- steps and use `@pypi` to specify step-specific overrides.
977
883
 
978
884
 
979
885
  Parameters
980
886
  ----------
981
- packages : Dict[str, str], default: {}
982
- Packages to use for this step. The key is the name of the package
983
- and the value is the version to use.
984
- python : str, optional, default: None
985
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
986
- that the version used will correspond to the version of the Python interpreter used to start the run.
887
+ load_policy : str, default: "fresh"
888
+ The policy for loading the checkpoint. The following policies are supported:
889
+ - "eager": Loads the the latest available checkpoint within the namespace.
890
+ With this mode, the latest checkpoint written by any previous task (possibly even from a different run) of the step
891
+ will be loaded at the start of the task.
892
+ - "none": Do not load any checkpoint
893
+ - "fresh": Loads the lastest checkpoint created within the running Task.
894
+ This mode helps load checkpoints across various retry attempts of the same task.
895
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
896
+ created within the task will be loaded when the task retries execution on failure.
897
+
898
+ temp_dir_root : str, default: None
899
+ The root directory under which `current.checkpoint.directory` will be created.
987
900
  """
988
901
  ...
989
902
 
990
903
  @typing.overload
991
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
904
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
992
905
  ...
993
906
 
994
907
  @typing.overload
995
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
908
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
996
909
  ...
997
910
 
998
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
911
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
999
912
  """
1000
- Specifies the PyPI packages for the step.
913
+ Enables checkpointing for a step.
1001
914
 
1002
- Information in this decorator will augment any
1003
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1004
- you can use `@pypi_base` to set packages required by all
1005
- steps and use `@pypi` to specify step-specific overrides.
1006
915
 
1007
916
 
1008
917
  Parameters
1009
918
  ----------
1010
- packages : Dict[str, str], default: {}
1011
- Packages to use for this step. The key is the name of the package
1012
- and the value is the version to use.
1013
- python : str, optional, default: None
1014
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1015
- that the version used will correspond to the version of the Python interpreter used to start the run.
919
+ load_policy : str, default: "fresh"
920
+ The policy for loading the checkpoint. The following policies are supported:
921
+ - "eager": Loads the the latest available checkpoint within the namespace.
922
+ With this mode, the latest checkpoint written by any previous task (possibly even from a different run) of the step
923
+ will be loaded at the start of the task.
924
+ - "none": Do not load any checkpoint
925
+ - "fresh": Loads the lastest checkpoint created within the running Task.
926
+ This mode helps load checkpoints across various retry attempts of the same task.
927
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
928
+ created within the task will be loaded when the task retries execution on failure.
929
+
930
+ temp_dir_root : str, default: None
931
+ The root directory under which `current.checkpoint.directory` will be created.
1016
932
  """
1017
933
  ...
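A hedged sketch of `@checkpoint` combined with `@retry`: files are written under `current.checkpoint.directory`, the only attribute named in the docstring, and `current.checkpoint.save()` is an assumption about the persistence call rather than something documented here.

```python
import os

from metaflow import FlowSpec, step, current, checkpoint, retry


class CheckpointDemoFlow(FlowSpec):  # hypothetical flow name

    @checkpoint(load_policy="fresh")
    @retry(times=2)
    @step
    def start(self):
        state_file = os.path.join(current.checkpoint.directory, "state.txt")
        with open(state_file, "w") as f:
            f.write("epoch=1")
        # Assumption: save() persists the file so a retry of this task can reload it.
        current.checkpoint.save(state_file)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CheckpointDemoFlow()
```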
1018
934
 
1019
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
935
+ @typing.overload
936
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1020
937
  """
1021
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1022
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
938
+ Specifies that the step will succeed under all circumstances.
939
+
940
+ The decorator will create an optional artifact, specified by `var`, which
941
+ contains the exception raised. You can use it to detect the presence
942
+ of errors, indicating that all happy-path artifacts produced by the step
943
+ are missing.
1023
944
 
1024
945
 
1025
946
  Parameters
1026
947
  ----------
1027
- timeout : int
1028
- Time, in seconds before the task times out and fails. (Default: 3600)
1029
- poke_interval : int
1030
- Time in seconds that the job should wait in between each try. (Default: 60)
1031
- mode : str
1032
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1033
- exponential_backoff : bool
1034
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1035
- pool : str
1036
- the slot pool this task should run in,
1037
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1038
- soft_fail : bool
1039
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1040
- name : str
1041
- Name of the sensor on Airflow
1042
- description : str
1043
- Description of sensor in the Airflow UI
1044
- external_dag_id : str
1045
- The dag_id that contains the task you want to wait for.
1046
- external_task_ids : List[str]
1047
- The list of task_ids that you want to wait for.
1048
- If None (default value) the sensor waits for the DAG. (Default: None)
1049
- allowed_states : List[str]
1050
- Iterable of allowed states, (Default: ['success'])
1051
- failed_states : List[str]
1052
- Iterable of failed or dis-allowed states. (Default: None)
1053
- execution_delta : datetime.timedelta
1054
- time difference with the previous execution to look at,
1055
- the default is the same logical date as the current task or DAG. (Default: None)
1056
- check_existence: bool
1057
- Set to True to check if the external task exists or check if
1058
- the DAG to wait for exists. (Default: True)
948
+ var : str, optional, default None
949
+ Name of the artifact in which to store the caught exception.
950
+ If not specified, the exception is not stored.
951
+ print_exception : bool, default True
952
+ Determines whether or not the exception is printed to
953
+ stdout when caught.
1059
954
  """
1060
955
  ...
1061
956
 
1062
957
  @typing.overload
1063
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
958
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
959
+ ...
960
+
961
+ @typing.overload
962
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
963
+ ...
964
+
965
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1064
966
  """
1065
- Specifies the PyPI packages for all steps of the flow.
967
+ Specifies that the step will succeed under all circumstances.
968
+
969
+ The decorator will create an optional artifact, specified by `var`, which
970
+ contains the exception raised. You can use it to detect the presence
971
+ of errors, indicating that all happy-path artifacts produced by the step
972
+ are missing.
1066
973
 
1067
- Use `@pypi_base` to set common packages required by all
1068
- steps and use `@pypi` to specify step-specific overrides.
1069
974
 
1070
975
  Parameters
1071
976
  ----------
1072
- packages : Dict[str, str], default: {}
1073
- Packages to use for this flow. The key is the name of the package
1074
- and the value is the version to use.
1075
- python : str, optional, default: None
1076
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1077
- that the version used will correspond to the version of the Python interpreter used to start the run.
977
+ var : str, optional, default None
978
+ Name of the artifact in which to store the caught exception.
979
+ If not specified, the exception is not stored.
980
+ print_exception : bool, default True
981
+ Determines whether or not the exception is printed to
982
+ stdout when caught.
1078
983
  """
1079
984
  ...
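A small sketch of `@catch` as described above; the flow name and the deliberate error are illustrative. The failing step still transitions, and the downstream step inspects the stored exception.

```python
from metaflow import FlowSpec, step, catch


class CatchDemoFlow(FlowSpec):  # hypothetical flow name

    @catch(var="compute_error", print_exception=True)
    @step
    def start(self):
        self.result = 1 / 0  # the ZeroDivisionError is caught into self.compute_error
        self.next(self.end)

    @step
    def end(self):
        # Presence of the artifact signals that happy-path artifacts may be missing.
        print("start step raised:", self.compute_error)


if __name__ == "__main__":
    CatchDemoFlow()
```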
1080
985
 
1081
986
  @typing.overload
1082
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
987
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
988
+ """
989
+ Specifies environment variables to be set prior to the execution of a step.
990
+
991
+
992
+ Parameters
993
+ ----------
994
+ vars : Dict[str, str], default {}
995
+ Dictionary of environment variables to set.
996
+ """
1083
997
  ...
1084
998
 
1085
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
999
+ @typing.overload
1000
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1001
+ ...
1002
+
1003
+ @typing.overload
1004
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1005
+ ...
1006
+
1007
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1086
1008
  """
1087
- Specifies the PyPI packages for all steps of the flow.
1009
+ Specifies environment variables to be set prior to the execution of a step.
1088
1010
 
1089
- Use `@pypi_base` to set common packages required by all
1090
- steps and use `@pypi` to specify step-specific overrides.
1091
1011
 
1092
1012
  Parameters
1093
1013
  ----------
1094
- packages : Dict[str, str], default: {}
1095
- Packages to use for this flow. The key is the name of the package
1096
- and the value is the version to use.
1097
- python : str, optional, default: None
1098
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1099
- that the version used will correspond to the version of the Python interpreter used to start the run.
1014
+ vars : Dict[str, str], default {}
1015
+ Dictionary of environment variables to set.
1100
1016
  """
1101
1017
  ...
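A minimal sketch of `@environment`; the variable name and value are illustrative.

```python
import os

from metaflow import FlowSpec, step, environment


class EnvDemoFlow(FlowSpec):  # hypothetical flow name

    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})  # illustrative variable
    @step
    def start(self):
        print("TOKENIZERS_PARALLELISM =", os.environ.get("TOKENIZERS_PARALLELISM"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvDemoFlow()
```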
1102
1018
 
1103
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1019
+ @typing.overload
1020
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1104
1021
  """
1105
- Specifies what flows belong to the same project.
1022
+ Specifies the event(s) that this flow depends on.
1106
1023
 
1107
- A project-specific namespace is created for all flows that
1108
- use the same `@project(name)`.
1024
+ ```
1025
+ @trigger(event='foo')
1026
+ ```
1027
+ or
1028
+ ```
1029
+ @trigger(events=['foo', 'bar'])
1030
+ ```
1031
+
1032
+ Additionally, you can specify the parameter mappings
1033
+ to map event payload to Metaflow parameters for the flow.
1034
+ ```
1035
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1036
+ ```
1037
+ or
1038
+ ```
1039
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1040
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1041
+ ```
1042
+
1043
+ 'parameters' can also be a list of strings and tuples like so:
1044
+ ```
1045
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1046
+ ```
1047
+ This is equivalent to:
1048
+ ```
1049
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1050
+ ```
1109
1051
 
1110
1052
 
1111
1053
  Parameters
1112
1054
  ----------
1113
- name : str
1114
- Project name. Make sure that the name is unique amongst all
1115
- projects that use the same production scheduler. The name may
1116
- contain only lowercase alphanumeric characters and underscores.
1055
+ event : Union[str, Dict[str, Any]], optional, default None
1056
+ Event dependency for this flow.
1057
+ events : List[Union[str, Dict[str, Any]]], default []
1058
+ Event dependencies for this flow.
1059
+ options : Dict[str, Any], default {}
1060
+ Backend-specific configuration for tuning eventing behavior.
1061
+ """
1062
+ ...
1063
+
1064
+ @typing.overload
1065
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1066
+ ...
1067
+
1068
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1069
+ """
1070
+ Specifies the event(s) that this flow depends on.
1117
1071
 
1118
- branch : Optional[str], default None
1119
- The branch to use. If not specified, the branch is set to
1120
- `user.<username>` unless `production` is set to `True`. This can
1121
- also be set on the command line using `--branch` as a top-level option.
1122
- It is an error to specify `branch` in the decorator and on the command line.
1072
+ ```
1073
+ @trigger(event='foo')
1074
+ ```
1075
+ or
1076
+ ```
1077
+ @trigger(events=['foo', 'bar'])
1078
+ ```
1123
1079
 
1124
- production : bool, default False
1125
- Whether or not the branch is the production branch. This can also be set on the
1126
- command line using `--production` as a top-level option. It is an error to specify
1127
- `production` in the decorator and on the command line.
1128
- The project branch name will be:
1129
- - if `branch` is specified:
1130
- - if `production` is True: `prod.<branch>`
1131
- - if `production` is False: `test.<branch>`
1132
- - if `branch` is not specified:
1133
- - if `production` is True: `prod`
1134
- - if `production` is False: `user.<username>`
1080
+ Additionally, you can specify the parameter mappings
1081
+ to map event payload to Metaflow parameters for the flow.
1082
+ ```
1083
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1084
+ ```
1085
+ or
1086
+ ```
1087
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1088
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1089
+ ```
1090
+
1091
+ 'parameters' can also be a list of strings and tuples like so:
1092
+ ```
1093
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1094
+ ```
1095
+ This is equivalent to:
1096
+ ```
1097
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1098
+ ```
1099
+
1100
+
1101
+ Parameters
1102
+ ----------
1103
+ event : Union[str, Dict[str, Any]], optional, default None
1104
+ Event dependency for this flow.
1105
+ events : List[Union[str, Dict[str, Any]]], default []
1106
+ Event dependencies for this flow.
1107
+ options : Dict[str, Any], default {}
1108
+ Backend-specific configuration for tuning eventing behavior.
1109
+ """
1110
+ ...
1111
+
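A hedged sketch of `@trigger` with the parameter-mapping form shown in the docstring; the event name, event field, and flow are hypothetical.

```python
from metaflow import FlowSpec, Parameter, step, trigger


# Maps the event field 'table' onto the flow parameter 'table', as described above.
@trigger(event={"name": "data_ready", "parameters": {"table": "table"}})
class TriggeredDemoFlow(FlowSpec):  # hypothetical flow name

    table = Parameter("table", default="raw_events")

    @step
    def start(self):
        print("processing table", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TriggeredDemoFlow()
```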
1112
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1113
+ """
1114
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1115
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1116
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1117
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
1118
+ starts only after all sensors finish.
1119
+
1120
+
1121
+ Parameters
1122
+ ----------
1123
+ timeout : int
1124
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1125
+ poke_interval : int
1126
+ Time in seconds that the job should wait in between each try. (Default: 60)
1127
+ mode : str
1128
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1129
+ exponential_backoff : bool
1130
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1131
+ pool : str
1132
+ the slot pool this task should run in,
1133
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1134
+ soft_fail : bool
1135
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
1136
+ name : str
1137
+ Name of the sensor on Airflow
1138
+ description : str
1139
+ Description of sensor in the Airflow UI
1140
+ bucket_key : Union[str, List[str]]
1141
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1142
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1143
+ bucket_name : str
1144
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1145
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1146
+ wildcard_match : bool
1147
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1148
+ aws_conn_id : str
1149
+ A reference to the S3 connection on Airflow. (Default: None)
1150
+ verify : bool
1151
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1135
1152
  """
1136
1153
  ...
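A hedged sketch of `@airflow_s3_key_sensor`; the bucket key is hypothetical and the options not passed are assumed to keep the defaults quoted in the docstring (timeout=3600, poke_interval=60, mode="poke", and so on).

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor


# Assumption: unspecified sensor options keep their documented defaults.
@airflow_s3_key_sensor(bucket_key="s3://example-bucket/daily/_SUCCESS")
class S3SensorDemoFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3SensorDemoFlow()
```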
1137
1154
 
@@ -1186,46 +1203,79 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1186
1203
  """
1187
1204
  ...
1188
1205
 
1189
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1206
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1190
1207
  """
1191
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1192
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1193
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1194
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1195
- starts only after all sensors finish.
1208
+ Specifies what flows belong to the same project.
1209
+
1210
+ A project-specific namespace is created for all flows that
1211
+ use the same `@project(name)`.
1212
+
1213
+
1214
+ Parameters
1215
+ ----------
1216
+ name : str
1217
+ Project name. Make sure that the name is unique amongst all
1218
+ projects that use the same production scheduler. The name may
1219
+ contain only lowercase alphanumeric characters and underscores.
1220
+
1221
+ branch : Optional[str], default None
1222
+ The branch to use. If not specified, the branch is set to
1223
+ `user.<username>` unless `production` is set to `True`. This can
1224
+ also be set on the command line using `--branch` as a top-level option.
1225
+ It is an error to specify `branch` in the decorator and on the command line.
1226
+
1227
+ production : bool, default False
1228
+ Whether or not the branch is the production branch. This can also be set on the
1229
+ command line using `--production` as a top-level option. It is an error to specify
1230
+ `production` in the decorator and on the command line.
1231
+ The project branch name will be:
1232
+ - if `branch` is specified:
1233
+ - if `production` is True: `prod.<branch>`
1234
+ - if `production` is False: `test.<branch>`
1235
+ - if `branch` is not specified:
1236
+ - if `production` is True: `prod`
1237
+ - if `production` is False: `user.<username>`
1238
+ """
1239
+ ...
1240
+
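A minimal sketch of the flow-level `@project` decorator; the project name is illustrative.

```python
from metaflow import FlowSpec, step, project


@project(name="demo_project")  # hypothetical project name
class ProjectDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectDemoFlow()
```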
1241
+ @typing.overload
1242
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1243
+ """
1244
+ Specifies the PyPI packages for all steps of the flow.
1245
+
1246
+ Use `@pypi_base` to set common packages required by all
1247
+ steps and use `@pypi` to specify step-specific overrides.
1248
+
1249
+ Parameters
1250
+ ----------
1251
+ packages : Dict[str, str], default: {}
1252
+ Packages to use for this flow. The key is the name of the package
1253
+ and the value is the version to use.
1254
+ python : str, optional, default: None
1255
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1256
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1257
+ """
1258
+ ...
1259
+
1260
+ @typing.overload
1261
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1262
+ ...
1263
+
1264
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1265
+ """
1266
+ Specifies the PyPI packages for all steps of the flow.
1196
1267
 
1268
+ Use `@pypi_base` to set common packages required by all
1269
+ steps and use `@pypi` to specify step-specific overrides.
1197
1270
 
1198
1271
  Parameters
1199
1272
  ----------
1200
- timeout : int
1201
- Time, in seconds before the task times out and fails. (Default: 3600)
1202
- poke_interval : int
1203
- Time in seconds that the job should wait in between each try. (Default: 60)
1204
- mode : str
1205
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1206
- exponential_backoff : bool
1207
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1208
- pool : str
1209
- the slot pool this task should run in,
1210
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1211
- soft_fail : bool
1212
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1213
- name : str
1214
- Name of the sensor on Airflow
1215
- description : str
1216
- Description of sensor in the Airflow UI
1217
- bucket_key : Union[str, List[str]]
1218
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1219
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1220
- bucket_name : str
1221
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1222
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1223
- wildcard_match : bool
1224
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1225
- aws_conn_id : str
1226
- a reference to the s3 connection on Airflow. (Default: None)
1227
- verify : bool
1228
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1273
+ packages : Dict[str, str], default: {}
1274
+ Packages to use for this flow. The key is the name of the package
1275
+ and the value is the version to use.
1276
+ python : str, optional, default: None
1277
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1278
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1229
1279
  """
1230
1280
  ...
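A short sketch of `@pypi_base` setting a common dependency for every step; the pinned versions are illustrative and running it assumes the PyPI environment is enabled (typically `--environment=pypi`).

```python
from metaflow import FlowSpec, step, pypi_base


@pypi_base(packages={"requests": "2.32.3"}, python="3.11.0")  # illustrative pins
class PypiBaseDemoFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        import requests  # available to every step via @pypi_base
        print("requests", requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiBaseDemoFlow()
```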
1231
1281
 
@@ -1344,93 +1394,101 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1344
1394
  ...
1345
1395
 
1346
1396
  @typing.overload
1347
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1397
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1348
1398
  """
1349
- Specifies the event(s) that this flow depends on.
1399
+ Specifies the flow(s) that this flow depends on.
1350
1400
 
1351
1401
  ```
1352
- @trigger(event='foo')
1402
+ @trigger_on_finish(flow='FooFlow')
1353
1403
  ```
1354
1404
  or
1355
1405
  ```
1356
- @trigger(events=['foo', 'bar'])
1406
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1357
1407
  ```
1408
+ This decorator respects the @project decorator and triggers the flow
1409
+ when upstream runs within the same namespace complete successfully.
1358
1410
 
1359
- Additionally, you can specify the parameter mappings
1360
- to map event payload to Metaflow parameters for the flow.
1411
+ Additionally, you can specify project-aware upstream flow dependencies
1412
+ by specifying the fully qualified project_flow_name.
1361
1413
  ```
1362
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1414
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1363
1415
  ```
1364
1416
  or
1365
1417
  ```
1366
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1367
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1418
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1368
1419
  ```
1369
1420
 
1370
- 'parameters' can also be a list of strings and tuples like so:
1371
- ```
1372
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1373
- ```
1374
- This is equivalent to:
1421
+ You can also specify just the project or project branch (other values will be
1422
+ inferred from the current project or project branch):
1375
1423
  ```
1376
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1424
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1377
1425
  ```
1378
1426
 
1427
+ Note that `branch` is typically one of:
1428
+ - `prod`
1429
+ - `user.bob`
1430
+ - `test.my_experiment`
1431
+ - `prod.staging`
1432
+
1379
1433
 
1380
1434
  Parameters
1381
1435
  ----------
1382
- event : Union[str, Dict[str, Any]], optional, default None
1383
- Event dependency for this flow.
1384
- events : List[Union[str, Dict[str, Any]]], default []
1385
- Events dependency for this flow.
1436
+ flow : Union[str, Dict[str, str]], optional, default None
1437
+ Upstream flow dependency for this flow.
1438
+ flows : List[Union[str, Dict[str, str]]], default []
1439
+ Upstream flow dependencies for this flow.
1386
1440
  options : Dict[str, Any], default {}
1387
1441
  Backend-specific configuration for tuning eventing behavior.
1388
1442
  """
1389
1443
  ...
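A minimal sketch of `@trigger_on_finish`; 'FooFlow' is the docstring's own example upstream flow and the downstream flow name is hypothetical.

```python
from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow="FooFlow")  # upstream flow name taken from the docstring example
class DownstreamDemoFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamDemoFlow()
```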
1390
1444
 
1391
1445
  @typing.overload
1392
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1446
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1393
1447
  ...
1394
1448
 
1395
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1449
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1396
1450
  """
1397
- Specifies the event(s) that this flow depends on.
1451
+ Specifies the flow(s) that this flow depends on.
1398
1452
 
1399
1453
  ```
1400
- @trigger(event='foo')
1454
+ @trigger_on_finish(flow='FooFlow')
1401
1455
  ```
1402
1456
  or
1403
1457
  ```
1404
- @trigger(events=['foo', 'bar'])
1458
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1405
1459
  ```
1460
+ This decorator respects the @project decorator and triggers the flow
1461
+ when upstream runs within the same namespace complete successfully.
1406
1462
 
1407
- Additionally, you can specify the parameter mappings
1408
- to map event payload to Metaflow parameters for the flow.
1463
+ Additionally, you can specify project-aware upstream flow dependencies
1464
+ by specifying the fully qualified project_flow_name.
1409
1465
  ```
1410
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1466
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1411
1467
  ```
1412
1468
  or
1413
1469
  ```
1414
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1415
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1470
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1416
1471
  ```
1417
1472
 
1418
- 'parameters' can also be a list of strings and tuples like so:
1419
- ```
1420
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1421
- ```
1422
- This is equivalent to:
1473
+ You can also specify just the project or project branch (other values will be
1474
+ inferred from the current project or project branch):
1423
1475
  ```
1424
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1476
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1425
1477
  ```
1426
1478
 
1479
+ Note that `branch` is typically one of:
1480
+ - `prod`
1481
+ - `user.bob`
1482
+ - `test.my_experiment`
1483
+ - `prod.staging`
1484
+
1427
1485
 
1428
1486
  Parameters
1429
1487
  ----------
1430
- event : Union[str, Dict[str, Any]], optional, default None
1431
- Event dependency for this flow.
1432
- events : List[Union[str, Dict[str, Any]]], default []
1433
- Events dependency for this flow.
1488
+ flow : Union[str, Dict[str, str]], optional, default None
1489
+ Upstream flow dependency for this flow.
1490
+ flows : List[Union[str, Dict[str, str]]], default []
1491
+ Upstream flow dependencies for this flow.
1434
1492
  options : Dict[str, Any], default {}
1435
1493
  Backend-specific configuration for tuning eventing behavior.
1436
1494
  """
@@ -1487,104 +1545,46 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1487
1545
  """
1488
1546
  ...
1489
1547
 
1490
- @typing.overload
1491
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1492
- """
1493
- Specifies the flow(s) that this flow depends on.
1494
-
1495
- ```
1496
- @trigger_on_finish(flow='FooFlow')
1497
- ```
1498
- or
1499
- ```
1500
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1501
- ```
1502
- This decorator respects the @project decorator and triggers the flow
1503
- when upstream runs within the same namespace complete successfully
1504
-
1505
- Additionally, you can specify project aware upstream flow dependencies
1506
- by specifying the fully qualified project_flow_name.
1507
- ```
1508
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1509
- ```
1510
- or
1511
- ```
1512
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1513
- ```
1514
-
1515
- You can also specify just the project or project branch (other values will be
1516
- inferred from the current project or project branch):
1517
- ```
1518
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1519
- ```
1520
-
1521
- Note that `branch` is typically one of:
1522
- - `prod`
1523
- - `user.bob`
1524
- - `test.my_experiment`
1525
- - `prod.staging`
1526
-
1527
-
1528
- Parameters
1529
- ----------
1530
- flow : Union[str, Dict[str, str]], optional, default None
1531
- Upstream flow dependency for this flow.
1532
- flows : List[Union[str, Dict[str, str]]], default []
1533
- Upstream flow dependencies for this flow.
1534
- options : Dict[str, Any], default {}
1535
- Backend-specific configuration for tuning eventing behavior.
1536
- """
1537
- ...
1538
-
1539
- @typing.overload
1540
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1541
- ...
1542
-
1543
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1548
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1544
1549
  """
1545
- Specifies the flow(s) that this flow depends on.
1546
-
1547
- ```
1548
- @trigger_on_finish(flow='FooFlow')
1549
- ```
1550
- or
1551
- ```
1552
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1553
- ```
1554
- This decorator respects the @project decorator and triggers the flow
1555
- when upstream runs within the same namespace complete successfully
1556
-
1557
- Additionally, you can specify project aware upstream flow dependencies
1558
- by specifying the fully qualified project_flow_name.
1559
- ```
1560
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1561
- ```
1562
- or
1563
- ```
1564
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1565
- ```
1566
-
1567
- You can also specify just the project or project branch (other values will be
1568
- inferred from the current project or project branch):
1569
- ```
1570
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1571
- ```
1572
-
1573
- Note that `branch` is typically one of:
1574
- - `prod`
1575
- - `user.bob`
1576
- - `test.my_experiment`
1577
- - `prod.staging`
1550
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1551
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1578
1552
 
1579
1553
 
1580
1554
  Parameters
1581
1555
  ----------
1582
- flow : Union[str, Dict[str, str]], optional, default None
1583
- Upstream flow dependency for this flow.
1584
- flows : List[Union[str, Dict[str, str]]], default []
1585
- Upstream flow dependencies for this flow.
1586
- options : Dict[str, Any], default {}
1587
- Backend-specific configuration for tuning eventing behavior.
1556
+ timeout : int
1557
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1558
+ poke_interval : int
1559
+ Time in seconds that the job should wait in between each try. (Default: 60)
1560
+ mode : str
1561
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1562
+ exponential_backoff : bool
1563
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1564
+ pool : str
1565
+ the slot pool this task should run in,
1566
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1567
+ soft_fail : bool
1568
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
1569
+ name : str
1570
+ Name of the sensor on Airflow
1571
+ description : str
1572
+ Description of sensor in the Airflow UI
1573
+ external_dag_id : str
1574
+ The dag_id that contains the task you want to wait for.
1575
+ external_task_ids : List[str]
1576
+ The list of task_ids that you want to wait for.
1577
+ If None (default value) the sensor waits for the DAG. (Default: None)
1578
+ allowed_states : List[str]
1579
+ Iterable of allowed states. (Default: ['success'])
1580
+ failed_states : List[str]
1581
+ Iterable of failed or disallowed states. (Default: None)
1582
+ execution_delta : datetime.timedelta
1583
+ Time difference with the previous execution to look at;
1584
+ the default is the same logical date as the current task or DAG. (Default: None)
1585
+ check_existence: bool
1586
+ Set to True to check if the external task exists or check if
1587
+ the DAG to wait for exists. (Default: True)
1588
1588
  """
1589
1589
  ...
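Finally, a hedged sketch of `@airflow_external_task_sensor`; the upstream DAG id is hypothetical and options not passed are assumed to keep the documented defaults.

```python
from metaflow import FlowSpec, step, airflow_external_task_sensor


# Assumption: unspecified sensor options keep their documented defaults.
@airflow_external_task_sensor(external_dag_id="upstream_etl_dag")
class ExternalSensorDemoFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ExternalSensorDemoFlow()
```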
1590
1590