ob-metaflow-stubs 6.0.3.180rc5__py2.py3-none-any.whl → 6.0.3.182rc0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (220) hide show
  1. metaflow-stubs/__init__.pyi +632 -632
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +1 -1
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +84 -84
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +1 -1
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +12 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  92. metaflow-stubs/multicore_utils.pyi +1 -1
  93. metaflow-stubs/ob_internal.pyi +2 -1
  94. metaflow-stubs/parameters.pyi +2 -2
  95. metaflow-stubs/plugins/__init__.pyi +11 -11
  96. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  98. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  99. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  100. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  101. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  102. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  103. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  104. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  105. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  106. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  107. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  109. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  110. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  111. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  113. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  115. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  116. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  119. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  120. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  121. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  122. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  123. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  124. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  125. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  126. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  128. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  129. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  130. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  131. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  132. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  133. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  135. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  138. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  140. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  141. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  142. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  143. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  144. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  145. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  146. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  147. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  148. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  149. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  151. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  152. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  153. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  154. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  155. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  156. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  157. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  158. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  159. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  161. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  162. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  163. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  164. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  165. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  166. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  169. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  170. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  171. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  173. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  174. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  175. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/perimeters.pyi +1 -1
  177. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  179. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  181. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  182. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  184. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  185. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  186. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  187. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  188. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  189. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  192. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  193. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  194. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  196. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  197. metaflow-stubs/profilers/__init__.pyi +1 -1
  198. metaflow-stubs/pylint_wrapper.pyi +1 -1
  199. metaflow-stubs/runner/__init__.pyi +1 -1
  200. metaflow-stubs/runner/deployer.pyi +29 -29
  201. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  202. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  203. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  204. metaflow-stubs/runner/nbrun.pyi +1 -1
  205. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  206. metaflow-stubs/runner/utils.pyi +2 -2
  207. metaflow-stubs/system/__init__.pyi +1 -1
  208. metaflow-stubs/system/system_logger.pyi +1 -1
  209. metaflow-stubs/system/system_monitor.pyi +1 -1
  210. metaflow-stubs/tagging_util.pyi +1 -1
  211. metaflow-stubs/tuple_util.pyi +1 -1
  212. metaflow-stubs/user_configs/__init__.pyi +1 -1
  213. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  214. metaflow-stubs/user_configs/config_options.pyi +2 -2
  215. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  216. {ob_metaflow_stubs-6.0.3.180rc5.dist-info → ob_metaflow_stubs-6.0.3.182rc0.dist-info}/METADATA +1 -1
  217. ob_metaflow_stubs-6.0.3.182rc0.dist-info/RECORD +220 -0
  218. ob_metaflow_stubs-6.0.3.180rc5.dist-info/RECORD +0 -220
  219. {ob_metaflow_stubs-6.0.3.180rc5.dist-info → ob_metaflow_stubs-6.0.3.182rc0.dist-info}/WHEEL +0 -0
  220. {ob_metaflow_stubs-6.0.3.180rc5.dist-info → ob_metaflow_stubs-6.0.3.182rc0.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
3
  # MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
4
- # Generated on 2025-06-17T20:32:02.265213 #
4
+ # Generated on 2025-06-18T09:39:16.439890 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
@@ -35,10 +35,10 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
35
35
  from .user_configs.config_parameters import config_expr as config_expr
36
36
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
37
37
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
38
- from . import tuple_util as tuple_util
39
38
  from . import cards as cards
40
- from . import events as events
39
+ from . import tuple_util as tuple_util
41
40
  from . import metaflow_git as metaflow_git
41
+ from . import events as events
42
42
  from . import runner as runner
43
43
  from . import plugins as plugins
44
44
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
@@ -155,6 +155,178 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
155
155
  """
156
156
  ...
157
157
 
158
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
159
+ """
160
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
161
+
162
+ User code call
163
+ --------------
164
+ @ollama(
165
+ models=[...],
166
+ ...
167
+ )
168
+
169
+ Valid backend options
170
+ ---------------------
171
+ - 'local': Run as a separate process on the local task machine.
172
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
173
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
174
+
175
+ Valid model options
176
+ -------------------
177
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
178
+
179
+
180
+ Parameters
181
+ ----------
182
+ models: list[str]
183
+ List of Ollama containers running models in sidecars.
184
+ backend: str
185
+ Determines where and how to run the Ollama process.
186
+ force_pull: bool
187
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
188
+ cache_update_policy: str
189
+ Cache update policy: "auto", "force", or "never".
190
+ force_cache_update: bool
191
+ Simple override for "force" cache update policy.
192
+ debug: bool
193
+ Whether to turn on verbose debugging logs.
194
+ circuit_breaker_config: dict
195
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
196
+ timeout_config: dict
197
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
198
+ """
199
+ ...
200
+
201
+ @typing.overload
202
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
203
+ """
204
+ Internal decorator to support Fast bakery
205
+ """
206
+ ...
207
+
208
+ @typing.overload
209
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
210
+ ...
211
+
212
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
213
+ """
214
+ Internal decorator to support Fast bakery
215
+ """
216
+ ...
217
+
218
+ @typing.overload
219
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
220
+ """
221
+ Enables checkpointing for a step.
222
+
223
+
224
+
225
+ Parameters
226
+ ----------
227
+ load_policy : str, default: "fresh"
228
+ The policy for loading the checkpoint. The following policies are supported:
229
+ - "eager": Loads the the latest available checkpoint within the namespace.
230
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
231
+ will be loaded at the start of the task.
232
+ - "none": Do not load any checkpoint
233
+ - "fresh": Loads the lastest checkpoint created within the running Task.
234
+ This mode helps loading checkpoints across various retry attempts of the same task.
235
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
236
+ created within the task will be loaded when the task is retries execution on failure.
237
+
238
+ temp_dir_root : str, default: None
239
+ The root directory under which `current.checkpoint.directory` will be created.
240
+ """
241
+ ...
242
+
243
+ @typing.overload
244
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
245
+ ...
246
+
247
+ @typing.overload
248
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
249
+ ...
250
+
251
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
252
+ """
253
+ Enables checkpointing for a step.
254
+
255
+
256
+
257
+ Parameters
258
+ ----------
259
+ load_policy : str, default: "fresh"
260
+ The policy for loading the checkpoint. The following policies are supported:
261
+ - "eager": Loads the the latest available checkpoint within the namespace.
262
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
263
+ will be loaded at the start of the task.
264
+ - "none": Do not load any checkpoint
265
+ - "fresh": Loads the lastest checkpoint created within the running Task.
266
+ This mode helps loading checkpoints across various retry attempts of the same task.
267
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
268
+ created within the task will be loaded when the task is retries execution on failure.
269
+
270
+ temp_dir_root : str, default: None
271
+ The root directory under which `current.checkpoint.directory` will be created.
272
+ """
273
+ ...
274
+
275
+ @typing.overload
276
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
277
+ """
278
+ Specifies the number of times the task corresponding
279
+ to a step needs to be retried.
280
+
281
+ This decorator is useful for handling transient errors, such as networking issues.
282
+ If your task contains operations that can't be retried safely, e.g. database updates,
283
+ it is advisable to annotate it with `@retry(times=0)`.
284
+
285
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
286
+ decorator will execute a no-op task after all retries have been exhausted,
287
+ ensuring that the flow execution can continue.
288
+
289
+
290
+ Parameters
291
+ ----------
292
+ times : int, default 3
293
+ Number of times to retry this task.
294
+ minutes_between_retries : int, default 2
295
+ Number of minutes between retries.
296
+ """
297
+ ...
298
+
299
+ @typing.overload
300
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
301
+ ...
302
+
303
+ @typing.overload
304
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
305
+ ...
306
+
307
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
308
+ """
309
+ Specifies the number of times the task corresponding
310
+ to a step needs to be retried.
311
+
312
+ This decorator is useful for handling transient errors, such as networking issues.
313
+ If your task contains operations that can't be retried safely, e.g. database updates,
314
+ it is advisable to annotate it with `@retry(times=0)`.
315
+
316
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
317
+ decorator will execute a no-op task after all retries have been exhausted,
318
+ ensuring that the flow execution can continue.
319
+
320
+
321
+ Parameters
322
+ ----------
323
+ times : int, default 3
324
+ Number of times to retry this task.
325
+ minutes_between_retries : int, default 2
326
+ Number of minutes between retries.
327
+ """
328
+ ...
329
+
158
330
  @typing.overload
159
331
  def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
160
332
  """
@@ -190,7 +362,7 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
190
362
  """
191
363
  ...
192
364
 
193
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
365
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
194
366
  """
195
367
  Specifies that this step should execute on DGX cloud.
196
368
 
@@ -201,8 +373,6 @@ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[t
201
373
  Number of GPUs to use.
202
374
  gpu_type : str
203
375
  Type of Nvidia GPU to use.
204
- queue_timeout : int
205
- Time to keep the job in NVCF's queue.
206
376
  """
207
377
  ...
208
378
 
@@ -257,6 +427,59 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
257
427
  """
258
428
  ...
259
429
 
430
+ def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
431
+ """
432
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
433
+
434
+ User code call
435
+ --------------
436
+ @vllm(
437
+ model="...",
438
+ ...
439
+ )
440
+
441
+ Valid backend options
442
+ ---------------------
443
+ - 'local': Run as a separate process on the local task machine.
444
+
445
+ Valid model options
446
+ -------------------
447
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
448
+
449
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
450
+ If you need multiple models, you must create multiple @vllm decorators.
451
+
452
+
453
+ Parameters
454
+ ----------
455
+ model: str
456
+ HuggingFace model identifier to be served by vLLM.
457
+ backend: str
458
+ Determines where and how to run the vLLM process.
459
+ debug: bool
460
+ Whether to turn on verbose debugging logs.
461
+ kwargs : Any
462
+ Any other keyword arguments are passed directly to the vLLM engine.
463
+ This allows for flexible configuration of vLLM server settings.
464
+ For example, `tensor_parallel_size=2`.
465
+ """
466
+ ...
467
+
468
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
469
+ """
470
+ Specifies that this step is used to deploy an instance of the app.
471
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
472
+
473
+
474
+ Parameters
475
+ ----------
476
+ app_port : int
477
+ Number of GPUs to use.
478
+ app_name : str
479
+ Name of the app to deploy.
480
+ """
481
+ ...
482
+
260
483
  @typing.overload
261
484
  def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
262
485
  """
@@ -404,116 +627,21 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
404
627
  ...
405
628
 
406
629
  @typing.overload
407
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
630
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
408
631
  """
409
- Specifies the number of times the task corresponding
410
- to a step needs to be retried.
411
-
412
- This decorator is useful for handling transient errors, such as networking issues.
413
- If your task contains operations that can't be retried safely, e.g. database updates,
414
- it is advisable to annotate it with `@retry(times=0)`.
415
-
416
- This can be used in conjunction with the `@catch` decorator. The `@catch`
417
- decorator will execute a no-op task after all retries have been exhausted,
418
- ensuring that the flow execution can continue.
419
-
420
-
421
- Parameters
422
- ----------
423
- times : int, default 3
424
- Number of times to retry this task.
425
- minutes_between_retries : int, default 2
426
- Number of minutes between retries.
632
+ Decorator prototype for all step decorators. This function gets specialized
633
+ and imported for all decorators types by _import_plugin_decorators().
427
634
  """
428
635
  ...
429
636
 
430
637
  @typing.overload
431
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
432
- ...
433
-
434
- @typing.overload
435
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
436
- ...
437
-
438
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
439
- """
440
- Specifies the number of times the task corresponding
441
- to a step needs to be retried.
442
-
443
- This decorator is useful for handling transient errors, such as networking issues.
444
- If your task contains operations that can't be retried safely, e.g. database updates,
445
- it is advisable to annotate it with `@retry(times=0)`.
446
-
447
- This can be used in conjunction with the `@catch` decorator. The `@catch`
448
- decorator will execute a no-op task after all retries have been exhausted,
449
- ensuring that the flow execution can continue.
450
-
451
-
452
- Parameters
453
- ----------
454
- times : int, default 3
455
- Number of times to retry this task.
456
- minutes_between_retries : int, default 2
457
- Number of minutes between retries.
458
- """
459
- ...
460
-
461
- @typing.overload
462
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
463
- """
464
- Specifies a timeout for your step.
465
-
466
- This decorator is useful if this step may hang indefinitely.
467
-
468
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
469
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
470
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
471
-
472
- Note that all the values specified in parameters are added together so if you specify
473
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
474
-
475
-
476
- Parameters
477
- ----------
478
- seconds : int, default 0
479
- Number of seconds to wait prior to timing out.
480
- minutes : int, default 0
481
- Number of minutes to wait prior to timing out.
482
- hours : int, default 0
483
- Number of hours to wait prior to timing out.
484
- """
485
- ...
486
-
487
- @typing.overload
488
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
489
- ...
490
-
491
- @typing.overload
492
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
638
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
493
639
  ...
494
640
 
495
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
641
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
496
642
  """
497
- Specifies a timeout for your step.
498
-
499
- This decorator is useful if this step may hang indefinitely.
500
-
501
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
502
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
503
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
504
-
505
- Note that all the values specified in parameters are added together so if you specify
506
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
507
-
508
-
509
- Parameters
510
- ----------
511
- seconds : int, default 0
512
- Number of seconds to wait prior to timing out.
513
- minutes : int, default 0
514
- Number of minutes to wait prior to timing out.
515
- hours : int, default 0
516
- Number of hours to wait prior to timing out.
643
+ Decorator prototype for all step decorators. This function gets specialized
644
+ and imported for all decorators types by _import_plugin_decorators().
517
645
  """
518
646
  ...
519
647
 
@@ -576,56 +704,32 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
576
704
  """
577
705
  ...
578
706
 
579
- @typing.overload
580
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
707
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
581
708
  """
582
- Creates a human-readable report, a Metaflow Card, after this step completes.
583
-
584
- Note that you may add multiple `@card` decorators in a step with different parameters.
709
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
585
710
 
586
711
 
587
712
  Parameters
588
713
  ----------
589
- type : str, default 'default'
590
- Card type.
591
- id : str, optional, default None
592
- If multiple cards are present, use this id to identify this card.
593
- options : Dict[str, Any], default {}
594
- Options passed to the card. The contents depend on the card type.
595
- timeout : int, default 45
596
- Interrupt reporting if it takes more than this many seconds.
597
- """
598
- ...
599
-
600
- @typing.overload
601
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
602
- ...
603
-
604
- @typing.overload
605
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
606
- ...
607
-
608
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
609
- """
610
- Creates a human-readable report, a Metaflow Card, after this step completes.
714
+ temp_dir_root : str, optional
715
+ The root directory that will hold the temporary directory where objects will be downloaded.
611
716
 
612
- Note that you may add multiple `@card` decorators in a step with different parameters.
717
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
718
+ The list of repos (models/datasets) to load.
613
719
 
720
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
614
721
 
615
- Parameters
616
- ----------
617
- type : str, default 'default'
618
- Card type.
619
- id : str, optional, default None
620
- If multiple cards are present, use this id to identify this card.
621
- options : Dict[str, Any], default {}
622
- Options passed to the card. The contents depend on the card type.
623
- timeout : int, default 45
624
- Interrupt reporting if it takes more than this many seconds.
722
+ - If repo (model/dataset) is not found in the datastore:
723
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
724
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
725
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
726
+
727
+ - If repo is found in the datastore:
728
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
625
729
  """
626
730
  ...
627
731
 
628
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
732
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
629
733
  """
630
734
  Specifies that this step should execute on DGX cloud.
631
735
 
@@ -636,6 +740,8 @@ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Cal
636
740
  Number of GPUs to use.
637
741
  gpu_type : str
638
742
  Type of Nvidia GPU to use.
743
+ queue_timeout : int
744
+ Time to keep the job in NVCF's queue.
639
745
  """
640
746
  ...
641
747
 
@@ -672,6 +778,57 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
672
778
  """
673
779
  ...
674
780
 
781
+ @typing.overload
782
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
783
+ """
784
+ Specifies the PyPI packages for the step.
785
+
786
+ Information in this decorator will augment any
787
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
788
+ you can use `@pypi_base` to set packages required by all
789
+ steps and use `@pypi` to specify step-specific overrides.
790
+
791
+
792
+ Parameters
793
+ ----------
794
+ packages : Dict[str, str], default: {}
795
+ Packages to use for this step. The key is the name of the package
796
+ and the value is the version to use.
797
+ python : str, optional, default: None
798
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
799
+ that the version used will correspond to the version of the Python interpreter used to start the run.
800
+ """
801
+ ...
802
+
803
+ @typing.overload
804
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
805
+ ...
806
+
807
+ @typing.overload
808
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
809
+ ...
810
+
811
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
812
+ """
813
+ Specifies the PyPI packages for the step.
814
+
815
+ Information in this decorator will augment any
816
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
817
+ you can use `@pypi_base` to set packages required by all
818
+ steps and use `@pypi` to specify step-specific overrides.
819
+
820
+
821
+ Parameters
822
+ ----------
823
+ packages : Dict[str, str], default: {}
824
+ Packages to use for this step. The key is the name of the package
825
+ and the value is the version to use.
826
+ python : str, optional, default: None
827
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
828
+ that the version used will correspond to the version of the Python interpreter used to start the run.
829
+ """
830
+ ...
831
+
675
832
  @typing.overload
676
833
  def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
677
834
  """
@@ -751,180 +908,126 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
751
908
  """
752
909
  ...
753
910
 
754
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
911
+ @typing.overload
912
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
755
913
  """
756
- Specifies that this step is used to deploy an instance of the app.
757
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
914
+ Creates a human-readable report, a Metaflow Card, after this step completes.
915
+
916
+ Note that you may add multiple `@card` decorators in a step with different parameters.
758
917
 
759
918
 
760
919
  Parameters
761
920
  ----------
762
- app_port : int
763
- Number of GPUs to use.
764
- app_name : str
765
- Name of the app to deploy.
921
+ type : str, default 'default'
922
+ Card type.
923
+ id : str, optional, default None
924
+ If multiple cards are present, use this id to identify this card.
925
+ options : Dict[str, Any], default {}
926
+ Options passed to the card. The contents depend on the card type.
927
+ timeout : int, default 45
928
+ Interrupt reporting if it takes more than this many seconds.
766
929
  """
767
930
  ...
768
931
 
769
932
  @typing.overload
770
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
771
- """
772
- Decorator prototype for all step decorators. This function gets specialized
773
- and imported for all decorators types by _import_plugin_decorators().
774
- """
933
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
775
934
  ...
776
935
 
777
936
  @typing.overload
778
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
937
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
779
938
  ...
780
939
 
781
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
940
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
782
941
  """
783
- Decorator prototype for all step decorators. This function gets specialized
784
- and imported for all decorators types by _import_plugin_decorators().
942
+ Creates a human-readable report, a Metaflow Card, after this step completes.
943
+
944
+ Note that you may add multiple `@card` decorators in a step with different parameters.
945
+
946
+
947
+ Parameters
948
+ ----------
949
+ type : str, default 'default'
950
+ Card type.
951
+ id : str, optional, default None
952
+ If multiple cards are present, use this id to identify this card.
953
+ options : Dict[str, Any], default {}
954
+ Options passed to the card. The contents depend on the card type.
955
+ timeout : int, default 45
956
+ Interrupt reporting if it takes more than this many seconds.
785
957
  """
786
958
  ...
787
959
 
788
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
960
+ @typing.overload
961
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
789
962
  """
790
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
963
+ Specifies a timeout for your step.
791
964
 
792
- User code call
793
- --------------
794
- @ollama(
795
- models=[...],
796
- ...
797
- )
965
+ This decorator is useful if this step may hang indefinitely.
798
966
 
799
- Valid backend options
800
- ---------------------
801
- - 'local': Run as a separate process on the local task machine.
802
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
803
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
804
-
805
- Valid model options
806
- -------------------
807
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
808
-
809
-
810
- Parameters
811
- ----------
812
- models: list[str]
813
- List of Ollama containers running models in sidecars.
814
- backend: str
815
- Determines where and how to run the Ollama process.
816
- force_pull: bool
817
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
818
- cache_update_policy: str
819
- Cache update policy: "auto", "force", or "never".
820
- force_cache_update: bool
821
- Simple override for "force" cache update policy.
822
- debug: bool
823
- Whether to turn on verbose debugging logs.
824
- circuit_breaker_config: dict
825
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
826
- timeout_config: dict
827
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
828
- """
829
- ...
830
-
831
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
832
- """
833
- Decorator that helps cache, version and store models/datasets from huggingface hub.
834
-
835
-
836
- Parameters
837
- ----------
838
- temp_dir_root : str, optional
839
- The root directory that will hold the temporary directory where objects will be downloaded.
840
-
841
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
842
- The list of repos (models/datasets) to load.
843
-
844
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
845
-
846
- - If repo (model/dataset) is not found in the datastore:
847
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
848
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
849
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
850
-
851
- - If repo is found in the datastore:
852
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
853
- """
854
- ...
855
-
856
- @typing.overload
857
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
858
- """
859
- Enables checkpointing for a step.
967
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
968
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
969
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
860
970
 
971
+ Note that all the values specified in parameters are added together so if you specify
972
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
861
973
 
862
974
 
863
975
  Parameters
864
976
  ----------
865
- load_policy : str, default: "fresh"
866
- The policy for loading the checkpoint. The following policies are supported:
867
- - "eager": Loads the the latest available checkpoint within the namespace.
868
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
869
- will be loaded at the start of the task.
870
- - "none": Do not load any checkpoint
871
- - "fresh": Loads the lastest checkpoint created within the running Task.
872
- This mode helps loading checkpoints across various retry attempts of the same task.
873
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
874
- created within the task will be loaded when the task is retries execution on failure.
875
-
876
- temp_dir_root : str, default: None
877
- The root directory under which `current.checkpoint.directory` will be created.
977
+ seconds : int, default 0
978
+ Number of seconds to wait prior to timing out.
979
+ minutes : int, default 0
980
+ Number of minutes to wait prior to timing out.
981
+ hours : int, default 0
982
+ Number of hours to wait prior to timing out.
878
983
  """
879
984
  ...
880
985
 
881
986
  @typing.overload
882
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
987
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
883
988
  ...
884
989
 
885
990
  @typing.overload
886
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
991
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
887
992
  ...
888
993
 
889
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
994
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
890
995
  """
891
- Enables checkpointing for a step.
996
+ Specifies a timeout for your step.
997
+
998
+ This decorator is useful if this step may hang indefinitely.
999
+
1000
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1001
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1002
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
892
1003
 
1004
+ Note that all the values specified in parameters are added together so if you specify
1005
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
893
1006
 
894
1007
 
895
1008
  Parameters
896
1009
  ----------
897
- load_policy : str, default: "fresh"
898
- The policy for loading the checkpoint. The following policies are supported:
899
- - "eager": Loads the the latest available checkpoint within the namespace.
900
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
901
- will be loaded at the start of the task.
902
- - "none": Do not load any checkpoint
903
- - "fresh": Loads the lastest checkpoint created within the running Task.
904
- This mode helps load checkpoints across various retry attempts of the same task.
905
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
906
- created within the task will be loaded when the task retries execution on failure.
907
-
908
- temp_dir_root : str, default: None
909
- The root directory under which `current.checkpoint.directory` will be created.
1010
+ seconds : int, default 0
1011
+ Number of seconds to wait prior to timing out.
1012
+ minutes : int, default 0
1013
+ Number of minutes to wait prior to timing out.
1014
+ hours : int, default 0
1015
+ Number of hours to wait prior to timing out.
910
1016
  """
911
1017
  ...
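As a usage sketch of the `@timeout` stub added above (all values are illustrative): the durations are summed, and a timeout surfaces as a step exception, so it composes with `@retry` and `@catch` as the docstring describes:
```
from metaflow import FlowSpec, step, timeout, retry, catch

class TimeoutFlow(FlowSpec):

    @catch(var="failure")          # catches the timeout once retries are exhausted
    @retry(times=2)                # a timed-out attempt is retried like any other failure
    @timeout(hours=1, minutes=1)   # effective limit: 1 hour and 1 minute, as documented above
    @step
    def start(self):
        # potentially long-running or hanging work goes here
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TimeoutFlow()
```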
912
1018
 
913
1019
  @typing.overload
914
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1020
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
915
1021
  """
916
- Specifies the PyPI packages for the step.
1022
+ Specifies the PyPI packages for all steps of the flow.
917
1023
 
918
- Information in this decorator will augment any
919
- attributes set in the `@pypi_base` flow-level decorator. Hence,
920
- you can use `@pypi_base` to set packages required by all
1024
+ Use `@pypi_base` to set common packages required by all
921
1025
  steps and use `@pypi` to specify step-specific overrides.
922
1026
 
923
-
924
1027
  Parameters
925
1028
  ----------
926
1029
  packages : Dict[str, str], default: {}
927
- Packages to use for this step. The key is the name of the package
1030
+ Packages to use for this flow. The key is the name of the package
928
1031
  and the value is the version to use.
929
1032
  python : str, optional, default: None
930
1033
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
@@ -933,27 +1036,20 @@ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] =
933
1036
  ...
934
1037
 
935
1038
  @typing.overload
936
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
937
- ...
938
-
939
- @typing.overload
940
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1039
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
941
1040
  ...
942
1041
 
943
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1042
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
944
1043
  """
945
- Specifies the PyPI packages for the step.
1044
+ Specifies the PyPI packages for all steps of the flow.
946
1045
 
947
- Information in this decorator will augment any
948
- attributes set in the `@pypi_base` flow-level decorator. Hence,
949
- you can use `@pypi_base` to set packages required by all
1046
+ Use `@pypi_base` to set common packages required by all
950
1047
  steps and use `@pypi` to specify step-specific overrides.
951
1048
 
952
-
953
1049
  Parameters
954
1050
  ----------
955
1051
  packages : Dict[str, str], default: {}
956
- Packages to use for this step. The key is the name of the package
1052
+ Packages to use for this flow. The key is the name of the package
957
1053
  and the value is the version to use.
958
1054
  python : str, optional, default: None
959
1055
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
@@ -961,293 +1057,69 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
961
1057
  """
962
1058
  ...
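A small sketch combining the flow-level `@pypi_base` with a step-level `@pypi` override, as the docstrings above describe; package names and versions are illustrative:
```
from metaflow import FlowSpec, step, pypi_base, pypi

@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.5")
class PypiExampleFlow(FlowSpec):

    @pypi(packages={"scikit-learn": "1.5.0"})  # step-specific override on top of the flow-level packages
    @step
    def start(self):
        import pandas   # provided by @pypi_base
        import sklearn  # provided by the step-level @pypi
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiExampleFlow()
```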
963
1059
 
964
- def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
965
- """
966
- This decorator is used to run vllm APIs as Metaflow task sidecars.
967
-
968
- User code call
969
- --------------
970
- @vllm(
971
- model="...",
972
- ...
973
- )
974
-
975
- Valid backend options
976
- ---------------------
977
- - 'local': Run as a separate process on the local task machine.
978
-
979
- Valid model options
980
- -------------------
981
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
982
-
983
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
984
- If you need multiple models, you must create multiple @vllm decorators.
985
-
986
-
987
- Parameters
988
- ----------
989
- model: str
990
- HuggingFace model identifier to be served by vLLM.
991
- backend: str
992
- Determines where and how to run the vLLM process.
993
- debug: bool
994
- Whether to turn on verbose debugging logs.
995
- kwargs : Any
996
- Any other keyword arguments are passed directly to the vLLM engine.
997
- This allows for flexible configuration of vLLM server settings.
998
- For example, `tensor_parallel_size=2`.
999
- """
1000
- ...
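Based only on the removed stub above, a hedged sketch of attaching `@vllm` to a step; how the sidecar endpoint is exposed to user code is not shown in the stub, so it is left out:
```
from metaflow import FlowSpec, step, vllm  # assumes @vllm is importable from metaflow, per the stub above

class VLLMSidecarFlow(FlowSpec):

    @vllm(model="meta-llama/Llama-3.2-1B", backend="local", debug=False)
    @step
    def start(self):
        # The decorator runs a vLLM OpenAI-compatible server as a task sidecar
        # serving exactly one model; extra engine settings (e.g. tensor_parallel_size=2)
        # would be passed as additional keyword arguments.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    VLLMSidecarFlow()
```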
1001
-
1002
- @typing.overload
1003
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1004
- """
1005
- Internal decorator to support Fast bakery
1006
- """
1007
- ...
1008
-
1009
- @typing.overload
1010
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1011
- ...
1012
-
1013
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1014
- """
1015
- Internal decorator to support Fast bakery
1016
- """
1017
- ...
1018
-
1019
1060
  @typing.overload
1020
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1061
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1021
1062
  """
1022
- Specifies the PyPI packages for all steps of the flow.
1063
+ Specifies the Conda environment for all steps of the flow.
1064
+
1065
+ Use `@conda_base` to set common libraries required by all
1066
+ steps and use `@conda` to specify step-specific additions.
1023
1067
 
1024
- Use `@pypi_base` to set common packages required by all
1025
- steps and use `@pypi` to specify step-specific overrides.
1026
1068
 
1027
1069
  Parameters
1028
1070
  ----------
1029
- packages : Dict[str, str], default: {}
1071
+ packages : Dict[str, str], default {}
1030
1072
  Packages to use for this flow. The key is the name of the package
1031
1073
  and the value is the version to use.
1032
- python : str, optional, default: None
1074
+ libraries : Dict[str, str], default {}
1075
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1076
+ python : str, optional, default None
1033
1077
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1034
1078
  that the version used will correspond to the version of the Python interpreter used to start the run.
1079
+ disabled : bool, default False
1080
+ If set to True, disables Conda.
1035
1081
  """
1036
1082
  ...
1037
1083
 
1038
1084
  @typing.overload
1039
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1085
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1040
1086
  ...
1041
1087
 
1042
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1088
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1043
1089
  """
1044
- Specifies the PyPI packages for all steps of the flow.
1090
+ Specifies the Conda environment for all steps of the flow.
1091
+
1092
+ Use `@conda_base` to set common libraries required by all
1093
+ steps and use `@conda` to specify step-specific additions.
1045
1094
 
1046
- Use `@pypi_base` to set common packages required by all
1047
- steps and use `@pypi` to specify step-specific overrides.
1048
1095
 
1049
1096
  Parameters
1050
1097
  ----------
1051
- packages : Dict[str, str], default: {}
1098
+ packages : Dict[str, str], default {}
1052
1099
  Packages to use for this flow. The key is the name of the package
1053
1100
  and the value is the version to use.
1054
- python : str, optional, default: None
1101
+ libraries : Dict[str, str], default {}
1102
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1103
+ python : str, optional, default None
1055
1104
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1056
1105
  that the version used will correspond to the version of the Python interpreter used to start the run.
1106
+ disabled : bool, default False
1107
+ If set to True, disables Conda.
1057
1108
  """
1058
1109
  ...
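A usage sketch for the `@conda_base` stub added above (versions are illustrative); a step-level `@conda` can add to or override these packages:
```
from metaflow import FlowSpec, step, conda_base

@conda_base(packages={"numpy": "1.26.4"}, python="3.10.12")
class CondaExampleFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # resolved through the flow-level Conda environment
        self.total = int(np.arange(4).sum())
        self.next(self.end)

    @step
    def end(self):
        print(self.total)

if __name__ == "__main__":
    CondaExampleFlow()
```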
1059
1110
 
1060
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1111
+ @typing.overload
1112
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1061
1113
  """
1062
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1063
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1114
+ Specifies the event(s) that this flow depends on.
1064
1115
 
1065
-
1066
- Parameters
1067
- ----------
1068
- timeout : int
1069
- Time, in seconds before the task times out and fails. (Default: 3600)
1070
- poke_interval : int
1071
- Time in seconds that the job should wait in between each try. (Default: 60)
1072
- mode : str
1073
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1074
- exponential_backoff : bool
1075
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1076
- pool : str
1077
- The slot pool this task should run in;
1078
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
1079
- soft_fail : bool
1080
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1081
- name : str
1082
- Name of the sensor on Airflow
1083
- description : str
1084
- Description of sensor in the Airflow UI
1085
- external_dag_id : str
1086
- The dag_id that contains the task you want to wait for.
1087
- external_task_ids : List[str]
1088
- The list of task_ids that you want to wait for.
1089
- If None (default value) the sensor waits for the DAG. (Default: None)
1090
- allowed_states : List[str]
1091
- Iterable of allowed states, (Default: ['success'])
1092
- failed_states : List[str]
1093
- Iterable of failed or dis-allowed states. (Default: None)
1094
- execution_delta : datetime.timedelta
1095
- time difference with the previous execution to look at,
1096
- the default is the same logical date as the current task or DAG. (Default: None)
1097
- check_existence: bool
1098
- Set to True to check if the external task exists or check if
1099
- the DAG to wait for exists. (Default: True)
1100
- """
1101
- ...
1102
-
1103
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1104
- """
1105
- Specifies what flows belong to the same project.
1106
-
1107
- A project-specific namespace is created for all flows that
1108
- use the same `@project(name)`.
1109
-
1110
-
1111
- Parameters
1112
- ----------
1113
- name : str
1114
- Project name. Make sure that the name is unique amongst all
1115
- projects that use the same production scheduler. The name may
1116
- contain only lowercase alphanumeric characters and underscores.
1117
-
1118
- branch : Optional[str], default None
1119
- The branch to use. If not specified, the branch is set to
1120
- `user.<username>` unless `production` is set to `True`. This can
1121
- also be set on the command line using `--branch` as a top-level option.
1122
- It is an error to specify `branch` in the decorator and on the command line.
1123
-
1124
- production : bool, default False
1125
- Whether or not the branch is the production branch. This can also be set on the
1126
- command line using `--production` as a top-level option. It is an error to specify
1127
- `production` in the decorator and on the command line.
1128
- The project branch name will be:
1129
- - if `branch` is specified:
1130
- - if `production` is True: `prod.<branch>`
1131
- - if `production` is False: `test.<branch>`
1132
- - if `branch` is not specified:
1133
- - if `production` is True: `prod`
1134
- - if `production` is False: `user.<username>`
1135
- """
1136
- ...
1137
-
1138
- @typing.overload
1139
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1140
- """
1141
- Specifies the flow(s) that this flow depends on.
1142
-
1143
- ```
1144
- @trigger_on_finish(flow='FooFlow')
1145
- ```
1146
- or
1147
- ```
1148
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1149
- ```
1150
- This decorator respects the @project decorator and triggers the flow
1151
- when upstream runs within the same namespace complete successfully.
1152
-
1153
- Additionally, you can specify project-aware upstream flow dependencies
1154
- by specifying the fully qualified project_flow_name.
1155
- ```
1156
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1157
- ```
1158
- or
1159
- ```
1160
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1161
- ```
1162
-
1163
- You can also specify just the project or project branch (other values will be
1164
- inferred from the current project or project branch):
1165
- ```
1166
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1167
- ```
1168
-
1169
- Note that `branch` is typically one of:
1170
- - `prod`
1171
- - `user.bob`
1172
- - `test.my_experiment`
1173
- - `prod.staging`
1174
-
1175
-
1176
- Parameters
1177
- ----------
1178
- flow : Union[str, Dict[str, str]], optional, default None
1179
- Upstream flow dependency for this flow.
1180
- flows : List[Union[str, Dict[str, str]]], default []
1181
- Upstream flow dependencies for this flow.
1182
- options : Dict[str, Any], default {}
1183
- Backend-specific configuration for tuning eventing behavior.
1184
- """
1185
- ...
1186
-
1187
- @typing.overload
1188
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1189
- ...
1190
-
1191
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1192
- """
1193
- Specifies the flow(s) that this flow depends on.
1194
-
1195
- ```
1196
- @trigger_on_finish(flow='FooFlow')
1197
- ```
1198
- or
1199
- ```
1200
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1201
- ```
1202
- This decorator respects the @project decorator and triggers the flow
1203
- when upstream runs within the same namespace complete successfully.
1204
-
1205
- Additionally, you can specify project-aware upstream flow dependencies
1206
- by specifying the fully qualified project_flow_name.
1207
- ```
1208
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1209
- ```
1210
- or
1211
- ```
1212
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1213
- ```
1214
-
1215
- You can also specify just the project or project branch (other values will be
1216
- inferred from the current project or project branch):
1217
- ```
1218
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1219
- ```
1220
-
1221
- Note that `branch` is typically one of:
1222
- - `prod`
1223
- - `user.bob`
1224
- - `test.my_experiment`
1225
- - `prod.staging`
1226
-
1227
-
1228
- Parameters
1229
- ----------
1230
- flow : Union[str, Dict[str, str]], optional, default None
1231
- Upstream flow dependency for this flow.
1232
- flows : List[Union[str, Dict[str, str]]], default []
1233
- Upstream flow dependencies for this flow.
1234
- options : Dict[str, Any], default {}
1235
- Backend-specific configuration for tuning eventing behavior.
1236
- """
1237
- ...
1238
-
1239
- @typing.overload
1240
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1241
- """
1242
- Specifies the event(s) that this flow depends on.
1243
-
1244
- ```
1245
- @trigger(event='foo')
1246
- ```
1247
- or
1248
- ```
1249
- @trigger(events=['foo', 'bar'])
1250
- ```
1116
+ ```
1117
+ @trigger(event='foo')
1118
+ ```
1119
+ or
1120
+ ```
1121
+ @trigger(events=['foo', 'bar'])
1122
+ ```
1251
1123
 
1252
1124
  Additionally, you can specify the parameter mappings
1253
1125
  to map event payload to Metaflow parameters for the flow.
@@ -1329,57 +1201,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1329
1201
  """
1330
1202
  ...
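A minimal sketch of the `@trigger` usage shown in the docstring above; the event name is illustrative:
```
from metaflow import FlowSpec, step, trigger

@trigger(event="foo")  # or: @trigger(events=["foo", "bar"])
class EventTriggeredFlow(FlowSpec):

    @step
    def start(self):
        # Runs when the deployed flow receives the 'foo' event.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EventTriggeredFlow()
```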
1331
1203
 
1332
- @typing.overload
1333
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1334
- """
1335
- Specifies the times when the flow should be run when running on a
1336
- production scheduler.
1337
-
1338
-
1339
- Parameters
1340
- ----------
1341
- hourly : bool, default False
1342
- Run the workflow hourly.
1343
- daily : bool, default True
1344
- Run the workflow daily.
1345
- weekly : bool, default False
1346
- Run the workflow weekly.
1347
- cron : str, optional, default None
1348
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1349
- specified by this expression.
1350
- timezone : str, optional, default None
1351
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1352
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1353
- """
1354
- ...
1355
-
1356
- @typing.overload
1357
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1358
- ...
1359
-
1360
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1361
- """
1362
- Specifies the times when the flow should be run when running on a
1363
- production scheduler.
1364
-
1365
-
1366
- Parameters
1367
- ----------
1368
- hourly : bool, default False
1369
- Run the workflow hourly.
1370
- daily : bool, default True
1371
- Run the workflow daily.
1372
- weekly : bool, default False
1373
- Run the workflow weekly.
1374
- cron : str, optional, default None
1375
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1376
- specified by this expression.
1377
- timezone : str, optional, default None
1378
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1379
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1380
- """
1381
- ...
1382
-
1383
1204
  def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1384
1205
  """
1385
1206
  Allows setting external datastores to save data for the
@@ -1494,6 +1315,193 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1494
1315
  """
1495
1316
  ...
1496
1317
 
1318
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1319
+ """
1320
+ Specifies what flows belong to the same project.
1321
+
1322
+ A project-specific namespace is created for all flows that
1323
+ use the same `@project(name)`.
1324
+
1325
+
1326
+ Parameters
1327
+ ----------
1328
+ name : str
1329
+ Project name. Make sure that the name is unique amongst all
1330
+ projects that use the same production scheduler. The name may
1331
+ contain only lowercase alphanumeric characters and underscores.
1332
+
1333
+ branch : Optional[str], default None
1334
+ The branch to use. If not specified, the branch is set to
1335
+ `user.<username>` unless `production` is set to `True`. This can
1336
+ also be set on the command line using `--branch` as a top-level option.
1337
+ It is an error to specify `branch` in the decorator and on the command line.
1338
+
1339
+ production : bool, default False
1340
+ Whether or not the branch is the production branch. This can also be set on the
1341
+ command line using `--production` as a top-level option. It is an error to specify
1342
+ `production` in the decorator and on the command line.
1343
+ The project branch name will be:
1344
+ - if `branch` is specified:
1345
+ - if `production` is True: `prod.<branch>`
1346
+ - if `production` is False: `test.<branch>`
1347
+ - if `branch` is not specified:
1348
+ - if `production` is True: `prod`
1349
+ - if `production` is False: `user.<username>`
1350
+ """
1351
+ ...
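A short sketch of the `@project` decorator documented above; with no `branch` and `production=False`, runs land in a `user.<username>` branch of the project namespace:
```
from metaflow import FlowSpec, step, project

@project(name="my_project")  # name may contain only lowercase alphanumerics and underscores
class ProjectScopedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectScopedFlow()
```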
1352
+
1353
+ @typing.overload
1354
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1355
+ """
1356
+ Specifies the times when the flow should be run when running on a
1357
+ production scheduler.
1358
+
1359
+
1360
+ Parameters
1361
+ ----------
1362
+ hourly : bool, default False
1363
+ Run the workflow hourly.
1364
+ daily : bool, default True
1365
+ Run the workflow daily.
1366
+ weekly : bool, default False
1367
+ Run the workflow weekly.
1368
+ cron : str, optional, default None
1369
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1370
+ specified by this expression.
1371
+ timezone : str, optional, default None
1372
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1373
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1374
+ """
1375
+ ...
1376
+
1377
+ @typing.overload
1378
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1379
+ ...
1380
+
1381
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1382
+ """
1383
+ Specifies the times when the flow should be run when running on a
1384
+ production scheduler.
1385
+
1386
+
1387
+ Parameters
1388
+ ----------
1389
+ hourly : bool, default False
1390
+ Run the workflow hourly.
1391
+ daily : bool, default True
1392
+ Run the workflow daily.
1393
+ weekly : bool, default False
1394
+ Run the workflow weekly.
1395
+ cron : str, optional, default None
1396
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1397
+ specified by this expression.
1398
+ timezone : str, optional, default None
1399
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1400
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1401
+ """
1402
+ ...
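A sketch of `@schedule` as documented above; the cron expression and timezone are illustrative, and the timezone is only honored on Argo Workflows:
```
from metaflow import FlowSpec, step, schedule

@schedule(cron="0 6 * * *", timezone="America/Los_Angeles")
class ScheduledFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScheduledFlow()
```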
1403
+
1404
+ @typing.overload
1405
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1406
+ """
1407
+ Specifies the flow(s) that this flow depends on.
1408
+
1409
+ ```
1410
+ @trigger_on_finish(flow='FooFlow')
1411
+ ```
1412
+ or
1413
+ ```
1414
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1415
+ ```
1416
+ This decorator respects the @project decorator and triggers the flow
1417
+ when upstream runs within the same namespace complete successfully.
1418
+
1419
+ Additionally, you can specify project-aware upstream flow dependencies
1420
+ by specifying the fully qualified project_flow_name.
1421
+ ```
1422
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1423
+ ```
1424
+ or
1425
+ ```
1426
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1427
+ ```
1428
+
1429
+ You can also specify just the project or project branch (other values will be
1430
+ inferred from the current project or project branch):
1431
+ ```
1432
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1433
+ ```
1434
+
1435
+ Note that `branch` is typically one of:
1436
+ - `prod`
1437
+ - `user.bob`
1438
+ - `test.my_experiment`
1439
+ - `prod.staging`
1440
+
1441
+
1442
+ Parameters
1443
+ ----------
1444
+ flow : Union[str, Dict[str, str]], optional, default None
1445
+ Upstream flow dependency for this flow.
1446
+ flows : List[Union[str, Dict[str, str]]], default []
1447
+ Upstream flow dependencies for this flow.
1448
+ options : Dict[str, Any], default {}
1449
+ Backend-specific configuration for tuning eventing behavior.
1450
+ """
1451
+ ...
1452
+
1453
+ @typing.overload
1454
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1455
+ ...
1456
+
1457
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1458
+ """
1459
+ Specifies the flow(s) that this flow depends on.
1460
+
1461
+ ```
1462
+ @trigger_on_finish(flow='FooFlow')
1463
+ ```
1464
+ or
1465
+ ```
1466
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1467
+ ```
1468
+ This decorator respects the @project decorator and triggers the flow
1469
+ when upstream runs within the same namespace complete successfully.
1470
+
1471
+ Additionally, you can specify project-aware upstream flow dependencies
1472
+ by specifying the fully qualified project_flow_name.
1473
+ ```
1474
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1475
+ ```
1476
+ or
1477
+ ```
1478
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1479
+ ```
1480
+
1481
+ You can also specify just the project or project branch (other values will be
1482
+ inferred from the current project or project branch):
1483
+ ```
1484
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1485
+ ```
1486
+
1487
+ Note that `branch` is typically one of:
1488
+ - `prod`
1489
+ - `user.bob`
1490
+ - `test.my_experiment`
1491
+ - `prod.staging`
1492
+
1493
+
1494
+ Parameters
1495
+ ----------
1496
+ flow : Union[str, Dict[str, str]], optional, default None
1497
+ Upstream flow dependency for this flow.
1498
+ flows : List[Union[str, Dict[str, str]]], default []
1499
+ Upstream flow dependencies for this flow.
1500
+ options : Dict[str, Any], default {}
1501
+ Backend-specific configuration for tuning eventing behavior.
1502
+ """
1503
+ ...
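A sketch of `@trigger_on_finish` as documented above; `FooFlow` is a placeholder for the upstream flow name:
```
from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow="FooFlow")  # or: @trigger_on_finish(flows=["FooFlow", "BarFlow"])
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # Runs after a successful FooFlow run in the same (project) namespace.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```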
1504
+
1497
1505
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1498
1506
  """
1499
1507
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1537,54 +1545,46 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1537
1545
  """
1538
1546
  ...
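A hedged sketch of the flow-level `@airflow_s3_key_sensor`, based only on the signature above (its parameter docs fall outside this hunk); only a few arguments are shown, the bucket and key values are illustrative, and the remaining parameters are assumed to keep their defaults:
```
from metaflow import FlowSpec, step, airflow_s3_key_sensor

@airflow_s3_key_sensor(
    name="wait_for_daily_export",
    description="Block the start step until the export lands in S3",
    bucket_name="my-bucket",
    bucket_key="exports/latest/data.parquet",
)
class S3SensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3SensorFlow()
```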
1539
1547
 
1540
- @typing.overload
1541
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1542
- """
1543
- Specifies the Conda environment for all steps of the flow.
1544
-
1545
- Use `@conda_base` to set common libraries required by all
1546
- steps and use `@conda` to specify step-specific additions.
1547
-
1548
-
1549
- Parameters
1550
- ----------
1551
- packages : Dict[str, str], default {}
1552
- Packages to use for this flow. The key is the name of the package
1553
- and the value is the version to use.
1554
- libraries : Dict[str, str], default {}
1555
- Supported for backward compatibility. When used with packages, packages will take precedence.
1556
- python : str, optional, default None
1557
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1558
- that the version used will correspond to the version of the Python interpreter used to start the run.
1559
- disabled : bool, default False
1560
- If set to True, disables Conda.
1561
- """
1562
- ...
1563
-
1564
- @typing.overload
1565
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1566
- ...
1567
-
1568
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1548
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1569
1549
  """
1570
- Specifies the Conda environment for all steps of the flow.
1571
-
1572
- Use `@conda_base` to set common libraries required by all
1573
- steps and use `@conda` to specify step-specific additions.
1550
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1551
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1574
1552
 
1575
1553
 
1576
1554
  Parameters
1577
1555
  ----------
1578
- packages : Dict[str, str], default {}
1579
- Packages to use for this flow. The key is the name of the package
1580
- and the value is the version to use.
1581
- libraries : Dict[str, str], default {}
1582
- Supported for backward compatibility. When used with packages, packages will take precedence.
1583
- python : str, optional, default None
1584
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1585
- that the version used will correspond to the version of the Python interpreter used to start the run.
1586
- disabled : bool, default False
1587
- If set to True, disables Conda.
1556
+ timeout : int
1557
+ Time, in seconds before the task times out and fails. (Default: 3600)
1558
+ poke_interval : int
1559
+ Time in seconds that the job should wait in between each try. (Default: 60)
1560
+ mode : str
1561
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1562
+ exponential_backoff : bool
1563
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1564
+ pool : str
1565
+ The slot pool this task should run in;
1566
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1567
+ soft_fail : bool
1568
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1569
+ name : str
1570
+ Name of the sensor on Airflow
1571
+ description : str
1572
+ Description of sensor in the Airflow UI
1573
+ external_dag_id : str
1574
+ The dag_id that contains the task you want to wait for.
1575
+ external_task_ids : List[str]
1576
+ The list of task_ids that you want to wait for.
1577
+ If None (default value) the sensor waits for the DAG. (Default: None)
1578
+ allowed_states : List[str]
1579
+ Iterable of allowed states. (Default: ['success'])
1580
+ failed_states : List[str]
1581
+ Iterable of failed or dis-allowed states. (Default: None)
1582
+ execution_delta : datetime.timedelta
1583
+ Time difference with the previous execution to look at;
1584
+ the default is the same logical date as the current task or DAG. (Default: None)
1585
+ check_existence: bool
1586
+ Set to True to check if the external task exists or check if
1587
+ the DAG to wait for exists. (Default: True)
1588
1588
  """
1589
1589
  ...
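A sketch of the flow-level `@airflow_external_task_sensor` added above, for a flow compiled with `airflow create`; the explicit values simply restate the defaults documented in the parameter list, and the DAG id is illustrative:
```
from metaflow import FlowSpec, step, airflow_external_task_sensor

@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name="wait_for_upstream_dag",
    description="Hold the start step until the upstream Airflow DAG succeeds",
    external_dag_id="upstream_etl",  # illustrative upstream DAG id
    external_task_ids=None,          # wait for the whole DAG
    allowed_states=["success"],
    failed_states=None,
    execution_delta=None,
    check_existence=True,
)
class ExternalDagSensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ExternalDagSensorFlow()
```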
1590
1590