ob-metaflow-stubs 6.0.3.182rc0__py2.py3-none-any.whl → 6.0.3.182rc1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (220)
  1. metaflow-stubs/__init__.pyi +723 -723
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +1 -1
  9. metaflow-stubs/events.pyi +1 -1
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +97 -97
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +5 -5
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  92. metaflow-stubs/multicore_utils.pyi +1 -1
  93. metaflow-stubs/ob_internal.pyi +1 -1
  94. metaflow-stubs/parameters.pyi +3 -3
  95. metaflow-stubs/plugins/__init__.pyi +9 -9
  96. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  98. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  99. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  100. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  101. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  102. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  103. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  104. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  105. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  106. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  107. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  109. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  110. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  111. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  113. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  115. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  116. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  119. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  120. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  121. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  122. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  123. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  124. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  125. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  126. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  128. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  129. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  130. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  131. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  132. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  133. metaflow-stubs/plugins/cards/__init__.pyi +5 -5
  134. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  135. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  138. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  140. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  141. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  142. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  143. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  144. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  145. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  146. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  147. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  148. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  149. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  151. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  152. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  153. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  154. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  155. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  156. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  157. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  158. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  159. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  161. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  162. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  163. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  164. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  165. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  166. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  169. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  170. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  171. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  173. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  174. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  175. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/perimeters.pyi +1 -1
  177. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  179. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  181. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  182. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  184. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  185. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  186. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  187. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  188. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +1 -1
  189. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  192. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  193. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  194. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  196. metaflow-stubs/plugins/uv/uv_environment.pyi +1 -1
  197. metaflow-stubs/profilers/__init__.pyi +1 -1
  198. metaflow-stubs/pylint_wrapper.pyi +1 -1
  199. metaflow-stubs/runner/__init__.pyi +1 -1
  200. metaflow-stubs/runner/deployer.pyi +5 -5
  201. metaflow-stubs/runner/deployer_impl.pyi +1 -1
  202. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  203. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  204. metaflow-stubs/runner/nbrun.pyi +1 -1
  205. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  206. metaflow-stubs/runner/utils.pyi +2 -2
  207. metaflow-stubs/system/__init__.pyi +1 -1
  208. metaflow-stubs/system/system_logger.pyi +2 -2
  209. metaflow-stubs/system/system_monitor.pyi +1 -1
  210. metaflow-stubs/tagging_util.pyi +1 -1
  211. metaflow-stubs/tuple_util.pyi +1 -1
  212. metaflow-stubs/user_configs/__init__.pyi +1 -1
  213. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  214. metaflow-stubs/user_configs/config_options.pyi +2 -2
  215. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  216. {ob_metaflow_stubs-6.0.3.182rc0.dist-info → ob_metaflow_stubs-6.0.3.182rc1.dist-info}/METADATA +1 -1
  217. ob_metaflow_stubs-6.0.3.182rc1.dist-info/RECORD +220 -0
  218. ob_metaflow_stubs-6.0.3.182rc0.dist-info/RECORD +0 -220
  219. {ob_metaflow_stubs-6.0.3.182rc0.dist-info → ob_metaflow_stubs-6.0.3.182rc1.dist-info}/WHEEL +0 -0
  220. {ob_metaflow_stubs-6.0.3.182rc0.dist-info → ob_metaflow_stubs-6.0.3.182rc1.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-06-18T09:39:16.439890 #
+ # Generated on 2025-06-18T10:06:22.722981 #
  ######################################################################################################

  from __future__ import annotations
@@ -35,17 +35,17 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import cards as cards
- from . import tuple_util as tuple_util
  from . import metaflow_git as metaflow_git
+ from . import tuple_util as tuple_util
+ from . import cards as cards
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import client as client
  from .client.core import namespace as namespace
@@ -155,328 +155,243 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @ollama(
- models=[...],
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+ Specifies that the step will success under all circumstances.

- Valid model options
- -------------------
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- models: list[str]
- List of Ollama containers running models in sidecars.
- backend: str
- Determines where and how to run the Ollama process.
- force_pull: bool
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
- cache_update_policy: str
- Cache update policy: "auto", "force", or "never".
- force_cache_update: bool
- Simple override for "force" cache update policy.
- debug: bool
- Whether to turn on verbose debugging logs.
- circuit_breaker_config: dict
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
- timeout_config: dict
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Internal decorator to support Fast bakery
- """
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Internal decorator to support Fast bakery
- """
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Enables checkpointing for a step.
+ Specifies that the step will success under all circumstances.

+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

- @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Enables checkpointing for a step.
-
+ Specifies that this step should execute on Kubernetes.


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ image_pull_secrets: List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+ Kubernetes image pull secrets to use when pulling container images
+ in Kubernetes.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort

- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
  """
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the PyPI packages for the step.

-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on DGX cloud.
-
-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- """
- ...
-
- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- This decorator is used to run vllm APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @vllm(
- model="...",
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
-
- Valid model options
- -------------------
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
-
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
- If you need multiple models, you must create multiple @vllm decorators.
-
+ Specifies the PyPI packages for the step.

- Parameters
- ----------
- model: str
- HuggingFace model identifier to be served by vLLM.
- backend: str
- Determines where and how to run the vLLM process.
- debug: bool
- Whether to turn on verbose debugging logs.
- kwargs : Any
- Any other keyword arguments are passed directly to the vLLM engine.
- This allows for flexible configuration of vLLM server settings.
- For example, `tensor_parallel_size=2`.
- """
- ...
-
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step is used to deploy an instance of the app.
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- app_port : int
- Number of GPUs to use.
- app_name : str
- Name of the app to deploy.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

@@ -537,195 +452,95 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on Kubernetes.
+ Enables checkpointing for a step.
+


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- image_pull_secrets: List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
- Kubernetes image pull secrets to use when pulling container images
- in Kubernetes.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.

- security_context: Dict[str, Any], optional, default None
- Container security context. Applies to the task container. Allows the following keys:
- - privileged: bool, optional, default None
- - allow_privilege_escalation: bool, optional, default None
- - run_as_user: int, optional, default None
- - run_as_group: int, optional, default None
- - run_as_non_root: bool, optional, default None
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Enables checkpointing for a step.
+
+
+
+ Parameters
+ ----------
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
705
532
  ...
706
533
 
707
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
534
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
708
535
  """
709
- Decorator that helps cache, version and store models/datasets from huggingface hub.
536
+ Specifies secrets to be retrieved and injected as environment variables prior to
537
+ the execution of a step.
710
538
 
711
539
 
712
540
  Parameters
713
541
  ----------
714
- temp_dir_root : str, optional
715
- The root directory that will hold the temporary directory where objects will be downloaded.
716
-
717
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
718
- The list of repos (models/datasets) to load.
719
-
720
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
721
-
722
- - If repo (model/dataset) is not found in the datastore:
723
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
724
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
725
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
726
-
727
- - If repo is found in the datastore:
728
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
542
+ sources : List[Union[str, Dict[str, Any]]], default: []
543
+ List of secret specs, defining how the secrets are to be retrieved
729
544
  """
730
545
  ...
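A minimal sketch of the `@secrets` decorator documented above; the secret name and the injected variable are hypothetical and depend on your secrets backend:

```python
import os

from metaflow import FlowSpec, secrets, step


class SecretsExampleFlow(FlowSpec):

    # "db-credentials" is a hypothetical secret spec identifier.
    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        # Keys of the secret are injected as environment variables before the step runs.
        print("DB user:", os.environ.get("DB_USER"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsExampleFlow()
```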
731
546
 
@@ -745,87 +560,28 @@ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[t
745
560
  """
746
561
  ...
747
562
 
748
- @typing.overload
749
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
750
- """
751
- Specifies environment variables to be set prior to the execution of a step.
752
-
753
-
754
- Parameters
755
- ----------
756
- vars : Dict[str, str], default {}
757
- Dictionary of environment variables to set.
758
- """
759
- ...
760
-
761
- @typing.overload
762
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
763
- ...
764
-
765
- @typing.overload
766
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
767
- ...
768
-
769
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
770
- """
771
- Specifies environment variables to be set prior to the execution of a step.
772
-
773
-
774
- Parameters
775
- ----------
776
- vars : Dict[str, str], default {}
777
- Dictionary of environment variables to set.
778
- """
779
- ...
780
-
781
- @typing.overload
782
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
783
- """
784
- Specifies the PyPI packages for the step.
785
-
786
- Information in this decorator will augment any
787
- attributes set in the `@pyi_base` flow-level decorator. Hence,
788
- you can use `@pypi_base` to set packages required by all
789
- steps and use `@pypi` to specify step-specific overrides.
790
-
791
-
792
- Parameters
793
- ----------
794
- packages : Dict[str, str], default: {}
795
- Packages to use for this step. The key is the name of the package
796
- and the value is the version to use.
797
- python : str, optional, default: None
798
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
799
- that the version used will correspond to the version of the Python interpreter used to start the run.
800
- """
801
- ...
802
-
803
- @typing.overload
804
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
805
- ...
806
-
807
- @typing.overload
808
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
809
- ...
810
-
811
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
563
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
812
564
  """
813
- Specifies the PyPI packages for the step.
814
-
815
- Information in this decorator will augment any
816
- attributes set in the `@pyi_base` flow-level decorator. Hence,
817
- you can use `@pypi_base` to set packages required by all
818
- steps and use `@pypi` to specify step-specific overrides.
565
+ Decorator that helps cache, version, and store models/datasets from Hugging Face Hub.
819
566
 
820
567
 
821
568
  Parameters
822
569
  ----------
823
- packages : Dict[str, str], default: {}
824
- Packages to use for this step. The key is the name of the package
825
- and the value is the version to use.
826
- python : str, optional, default: None
827
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
828
- that the version used will correspond to the version of the Python interpreter used to start the run.
570
+ temp_dir_root : str, optional
571
+ The root directory that will hold the temporary directory where objects will be downloaded.
572
+
573
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
574
+ The list of repos (models/datasets) to load.
575
+
576
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
577
+
578
+ - If repo (model/dataset) is not found in the datastore:
579
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
580
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
581
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
582
+
583
+ - If repo is found in the datastore:
584
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
829
585
  """
830
586
  ...
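A minimal sketch of the `@huggingface_hub` decorator documented above; the repo id is illustrative, and `current.huggingface_hub.loaded` is assumed to map each loaded repo id to its local path:

```python
from metaflow import FlowSpec, current, huggingface_hub, step


class HFHubExampleFlow(FlowSpec):

    # The repo id below is illustrative; any Hugging Face Hub model or dataset can be listed.
    @huggingface_hub(load=["bert-base-uncased"])
    @step
    def start(self):
        # Assumed mapping: repo id -> local path of the cached download.
        local_path = current.huggingface_hub.loaded["bert-base-uncased"]
        print("Model available at:", local_path)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    HFHubExampleFlow()
```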
831
587
 
@@ -909,51 +665,100 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
909
665
  ...
910
666
 
911
667
  @typing.overload
912
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
668
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
913
669
  """
914
- Creates a human-readable report, a Metaflow Card, after this step completes.
670
+ Specifies the number of times the task corresponding
671
+ to a step needs to be retried.
915
672
 
916
- Note that you may add multiple `@card` decorators in a step with different parameters.
673
+ This decorator is useful for handling transient errors, such as networking issues.
674
+ If your task contains operations that can't be retried safely, e.g. database updates,
675
+ it is advisable to annotate it with `@retry(times=0)`.
676
+
677
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
678
+ decorator will execute a no-op task after all retries have been exhausted,
679
+ ensuring that the flow execution can continue.
917
680
 
918
681
 
919
682
  Parameters
920
683
  ----------
921
- type : str, default 'default'
922
- Card type.
923
- id : str, optional, default None
924
- If multiple cards are present, use this id to identify this card.
925
- options : Dict[str, Any], default {}
926
- Options passed to the card. The contents depend on the card type.
927
- timeout : int, default 45
928
- Interrupt reporting if it takes more than this many seconds.
684
+ times : int, default 3
685
+ Number of times to retry this task.
686
+ minutes_between_retries : int, default 2
687
+ Number of minutes between retries.
929
688
  """
930
689
  ...
931
690
 
932
691
  @typing.overload
933
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
692
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
934
693
  ...
935
694
 
936
695
  @typing.overload
937
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
696
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
938
697
  ...
939
698
 
940
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
699
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
941
700
  """
942
- Creates a human-readable report, a Metaflow Card, after this step completes.
701
+ Specifies the number of times the task corresponding
702
+ to a step needs to be retried.
943
703
 
944
- Note that you may add multiple `@card` decorators in a step with different parameters.
704
+ This decorator is useful for handling transient errors, such as networking issues.
705
+ If your task contains operations that can't be retried safely, e.g. database updates,
706
+ it is advisable to annotate it with `@retry(times=0)`.
707
+
708
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
709
+ decorator will execute a no-op task after all retries have been exhausted,
710
+ ensuring that the flow execution can continue.
945
711
 
946
712
 
947
713
  Parameters
948
714
  ----------
949
- type : str, default 'default'
950
- Card type.
951
- id : str, optional, default None
952
- If multiple cards are present, use this id to identify this card.
953
- options : Dict[str, Any], default {}
954
- Options passed to the card. The contents depend on the card type.
955
- timeout : int, default 45
956
- Interrupt reporting if it takes more than this many seconds.
715
+ times : int, default 3
716
+ Number of times to retry this task.
717
+ minutes_between_retries : int, default 2
718
+ Number of minutes between retries.
719
+ """
720
+ ...
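A minimal sketch of `@retry` as documented above; the flow itself is hypothetical:

```python
from metaflow import FlowSpec, retry, step


class RetryExampleFlow(FlowSpec):

    # A transient failure in this step is retried up to two more times,
    # waiting one minute between attempts.
    @retry(times=2, minutes_between_retries=1)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    RetryExampleFlow()
```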
721
+
722
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
723
+ """
724
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
725
+
726
+ User code call
727
+ --------------
728
+ @ollama(
729
+ models=[...],
730
+ ...
731
+ )
732
+
733
+ Valid backend options
734
+ ---------------------
735
+ - 'local': Run as a separate process on the local task machine.
736
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
737
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
738
+
739
+ Valid model options
740
+ -------------------
741
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
742
+
743
+
744
+ Parameters
745
+ ----------
746
+ models: list[str]
747
+ List of models to run in Ollama sidecar containers.
748
+ backend: str
749
+ Determines where and how to run the Ollama process.
750
+ force_pull: bool
751
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
752
+ cache_update_policy: str
753
+ Cache update policy: "auto", "force", or "never".
754
+ force_cache_update: bool
755
+ Simple override for "force" cache update policy.
756
+ debug: bool
757
+ Whether to turn on verbose debugging logs.
758
+ circuit_breaker_config: dict
759
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
760
+ timeout_config: dict
761
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
957
762
  """
958
763
  ...
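A minimal sketch of the `@ollama` decorator documented above, assuming the parameters not shown fall back to their defaults; the model name comes from the Ollama catalog referenced in the docstring:

```python
from metaflow import FlowSpec, ollama, step


class OllamaExampleFlow(FlowSpec):

    # 'llama3.2' is one of the models listed at https://ollama.com/search.
    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        # The Ollama sidecar runs for the duration of this step; calls to it
        # (e.g. over its local HTTP API) are omitted here.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaExampleFlow()
```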
959
764
 
@@ -1017,59 +822,53 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
1017
822
  ...
1018
823
 
1019
824
  @typing.overload
1020
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
825
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1021
826
  """
1022
- Specifies the PyPI packages for all steps of the flow.
827
+ Specifies the Conda environment for the step.
828
+
829
+ Information in this decorator will augment any
830
+ attributes set in the `@conda_base` flow-level decorator. Hence,
831
+ you can use `@conda_base` to set packages required by all
832
+ steps and use `@conda` to specify step-specific overrides.
1023
833
 
1024
- Use `@pypi_base` to set common packages required by all
1025
- steps and use `@pypi` to specify step-specific overrides.
1026
834
 
1027
835
  Parameters
1028
836
  ----------
1029
- packages : Dict[str, str], default: {}
1030
- Packages to use for this flow. The key is the name of the package
837
+ packages : Dict[str, str], default {}
838
+ Packages to use for this step. The key is the name of the package
1031
839
  and the value is the version to use.
1032
- python : str, optional, default: None
840
+ libraries : Dict[str, str], default {}
841
+ Supported for backward compatibility. When used with packages, packages will take precedence.
842
+ python : str, optional, default None
1033
843
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1034
844
  that the version used will correspond to the version of the Python interpreter used to start the run.
845
+ disabled : bool, default False
846
+ If set to True, disables @conda.
1035
847
  """
1036
848
  ...
1037
849
 
1038
850
  @typing.overload
1039
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
851
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1040
852
  ...
1041
853
 
1042
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1043
- """
1044
- Specifies the PyPI packages for all steps of the flow.
1045
-
1046
- Use `@pypi_base` to set common packages required by all
1047
- steps and use `@pypi` to specify step-specific overrides.
1048
-
1049
- Parameters
1050
- ----------
1051
- packages : Dict[str, str], default: {}
1052
- Packages to use for this flow. The key is the name of the package
1053
- and the value is the version to use.
1054
- python : str, optional, default: None
1055
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1056
- that the version used will correspond to the version of the Python interpreter used to start the run.
1057
- """
854
+ @typing.overload
855
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1058
856
  ...
1059
857
 
1060
- @typing.overload
1061
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
858
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1062
859
  """
1063
- Specifies the Conda environment for all steps of the flow.
860
+ Specifies the Conda environment for the step.
1064
861
 
1065
- Use `@conda_base` to set common libraries required by all
1066
- steps and use `@conda` to specify step-specific additions.
862
+ Information in this decorator will augment any
863
+ attributes set in the `@conda_base` flow-level decorator. Hence,
864
+ you can use `@conda_base` to set packages required by all
865
+ steps and use `@conda` to specify step-specific overrides.
1067
866
 
1068
867
 
1069
868
  Parameters
1070
869
  ----------
1071
870
  packages : Dict[str, str], default {}
1072
- Packages to use for this flow. The key is the name of the package
871
+ Packages to use for this step. The key is the name of the package
1073
872
  and the value is the version to use.
1074
873
  libraries : Dict[str, str], default {}
1075
874
  Supported for backward compatibility. When used with packages, packages will take precedence.
@@ -1077,127 +876,143 @@ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[s
1077
876
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1078
877
  that the version used will correspond to the version of the Python interpreter used to start the run.
1079
878
  disabled : bool, default False
1080
- If set to True, disables Conda.
879
+ If set to True, disables @conda.
1081
880
  """
1082
881
  ...
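A minimal sketch combining `@conda_base` with the step-level `@conda` override described above; the Python version and package pin are illustrative:

```python
from metaflow import FlowSpec, conda, conda_base, step


@conda_base(python="3.10.12")
class CondaExampleFlow(FlowSpec):

    # Step-specific addition on top of the flow-level @conda_base settings.
    @conda(packages={"pandas": "2.1.4"})
    @step
    def start(self):
        import pandas as pd
        print("pandas", pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaExampleFlow()
```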
1083
882
 
1084
883
  @typing.overload
1085
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
884
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
885
+ """
886
+ Decorator prototype for all step decorators. This function gets specialized
887
+ and imported for all decorator types by _import_plugin_decorators().
888
+ """
1086
889
  ...
1087
890
 
1088
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
891
+ @typing.overload
892
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
893
+ ...
894
+
895
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1089
896
  """
1090
- Specifies the Conda environment for all steps of the flow.
1091
-
1092
- Use `@conda_base` to set common libraries required by all
1093
- steps and use `@conda` to specify step-specific additions.
897
+ Decorator prototype for all step decorators. This function gets specialized
898
+ and imported for all decorator types by _import_plugin_decorators().
899
+ """
900
+ ...
901
+
902
+ @typing.overload
903
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
904
+ """
905
+ Specifies environment variables to be set prior to the execution of a step.
1094
906
 
1095
907
 
1096
908
  Parameters
1097
909
  ----------
1098
- packages : Dict[str, str], default {}
1099
- Packages to use for this flow. The key is the name of the package
1100
- and the value is the version to use.
1101
- libraries : Dict[str, str], default {}
1102
- Supported for backward compatibility. When used with packages, packages will take precedence.
1103
- python : str, optional, default None
1104
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1105
- that the version used will correspond to the version of the Python interpreter used to start the run.
1106
- disabled : bool, default False
1107
- If set to True, disables Conda.
910
+ vars : Dict[str, str], default {}
911
+ Dictionary of environment variables to set.
1108
912
  """
1109
913
  ...
1110
914
 
1111
915
  @typing.overload
1112
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
916
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
917
+ ...
918
+
919
+ @typing.overload
920
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
921
+ ...
922
+
923
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1113
924
  """
1114
- Specifies the event(s) that this flow depends on.
1115
-
1116
- ```
1117
- @trigger(event='foo')
1118
- ```
1119
- or
1120
- ```
1121
- @trigger(events=['foo', 'bar'])
1122
- ```
925
+ Specifies environment variables to be set prior to the execution of a step.
1123
926
 
1124
- Additionally, you can specify the parameter mappings
1125
- to map event payload to Metaflow parameters for the flow.
1126
- ```
1127
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1128
- ```
1129
- or
1130
- ```
1131
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1132
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1133
- ```
1134
927
 
1135
- 'parameters' can also be a list of strings and tuples like so:
1136
- ```
1137
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1138
- ```
1139
- This is equivalent to:
1140
- ```
1141
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1142
- ```
928
+ Parameters
929
+ ----------
930
+ vars : Dict[str, str], default {}
931
+ Dictionary of environment variables to set.
932
+ """
933
+ ...
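A minimal sketch of `@environment` as documented above:

```python
import os

from metaflow import FlowSpec, environment, step


class EnvironmentExampleFlow(FlowSpec):

    @environment(vars={"MODE": "batch"})
    @step
    def start(self):
        # The variable is set before the step body executes.
        print("MODE =", os.environ["MODE"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvironmentExampleFlow()
```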
934
+
935
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
936
+ """
937
+ Specifies that this step is used to deploy an instance of the app.
938
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir are set.
1143
939
 
1144
940
 
1145
941
  Parameters
1146
942
  ----------
1147
- event : Union[str, Dict[str, Any]], optional, default None
1148
- Event dependency for this flow.
1149
- events : List[Union[str, Dict[str, Any]]], default []
1150
- Events dependency for this flow.
1151
- options : Dict[str, Any], default {}
1152
- Backend-specific configuration for tuning eventing behavior.
943
+ app_port : int
944
+ Port on which the deployed app listens.
945
+ app_name : str
946
+ Name of the app to deploy.
1153
947
  """
1154
948
  ...
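A sketch of how a deploy step using `@app_deploy` might look, given the requirement above that `self.app_name`, `self.app_port`, `self.entrypoint` and `self.deployDir` are set; all values are hypothetical:

```python
from metaflow import FlowSpec, app_deploy, step


class AppDeployExampleFlow(FlowSpec):

    @step
    def start(self):
        # Hypothetical values; the deploy step expects these attributes to exist.
        self.app_name = "demo-app"
        self.app_port = 8080
        self.entrypoint = "python serve.py"
        self.deployDir = "app/"
        self.next(self.deploy)

    @app_deploy(app_port=8080, app_name="demo-app")
    @step
    def deploy(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    AppDeployExampleFlow()
```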
1155
949
 
1156
- @typing.overload
1157
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
950
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
951
+ """
952
+ Specifies that this step should execute on DGX cloud.
953
+
954
+
955
+ Parameters
956
+ ----------
957
+ gpu : int
958
+ Number of GPUs to use.
959
+ gpu_type : str
960
+ Type of Nvidia GPU to use.
961
+ """
1158
962
  ...
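A minimal sketch of `@nvct` as documented above; the GPU type is illustrative and depends on what the DGX Cloud pool offers:

```python
from metaflow import FlowSpec, nvct, step


class DGXExampleFlow(FlowSpec):

    # 'H100' is an illustrative GPU type.
    @nvct(gpu=1, gpu_type="H100")
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DGXExampleFlow()
```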
1159
963
 
1160
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
964
+ def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1161
965
  """
1162
- Specifies the event(s) that this flow depends on.
966
+ This decorator is used to run vLLM APIs as Metaflow task sidecars.
1163
967
 
1164
- ```
1165
- @trigger(event='foo')
1166
- ```
1167
- or
1168
- ```
1169
- @trigger(events=['foo', 'bar'])
1170
- ```
968
+ User code call
969
+ --------------
970
+ @vllm(
971
+ model="...",
972
+ ...
973
+ )
1171
974
 
1172
- Additionally, you can specify the parameter mappings
1173
- to map event payload to Metaflow parameters for the flow.
1174
- ```
1175
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1176
- ```
1177
- or
1178
- ```
1179
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1180
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1181
- ```
975
+ Valid backend options
976
+ ---------------------
977
+ - 'local': Run as a separate process on the local task machine.
1182
978
 
1183
- 'parameters' can also be a list of strings and tuples like so:
1184
- ```
1185
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1186
- ```
1187
- This is equivalent to:
1188
- ```
1189
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1190
- ```
979
+ Valid model options
980
+ -------------------
981
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
982
+
983
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
984
+ If you need multiple models, you must create multiple @vllm decorators.
1191
985
 
1192
986
 
1193
987
  Parameters
1194
988
  ----------
1195
- event : Union[str, Dict[str, Any]], optional, default None
1196
- Event dependency for this flow.
1197
- events : List[Union[str, Dict[str, Any]]], default []
1198
- Events dependency for this flow.
1199
- options : Dict[str, Any], default {}
1200
- Backend-specific configuration for tuning eventing behavior.
989
+ model: str
990
+ HuggingFace model identifier to be served by vLLM.
991
+ backend: str
992
+ Determines where and how to run the vLLM process.
993
+ debug: bool
994
+ Whether to turn on verbose debugging logs.
995
+ kwargs : Any
996
+ Any other keyword arguments are passed directly to the vLLM engine.
997
+ This allows for flexible configuration of vLLM server settings.
998
+ For example, `tensor_parallel_size=2`.
999
+ """
1000
+ ...
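A minimal sketch of `@vllm` as documented above, assuming the unspecified parameters take their defaults; one decorator serves one model, as the note explains:

```python
from metaflow import FlowSpec, step, vllm


class VLLMExampleFlow(FlowSpec):

    # The model identifier is the one used in the docstring example above.
    @vllm(model="meta-llama/Llama-3.2-1B", backend="local")
    @step
    def start(self):
        # The vLLM OpenAI-compatible server runs as a sidecar for this step;
        # client calls against it are omitted here.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    VLLMExampleFlow()
```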
1001
+
1002
+ @typing.overload
1003
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1004
+ """
1005
+ Internal decorator to support Fast bakery
1006
+ """
1007
+ ...
1008
+
1009
+ @typing.overload
1010
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1011
+ ...
1012
+
1013
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1014
+ """
1015
+ Internal decorator to support Fast bakery
1201
1016
  """
1202
1017
  ...
1203
1018
 
@@ -1300,104 +1115,281 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1300
1115
  Parameters:
1301
1116
  ----------
1302
1117
 
1303
- type: str
1304
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1118
+ type: str
1119
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1120
+
1121
+ config: dict or Callable
1122
+ Dictionary of configuration options for the datastore. The following keys are required:
1123
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1124
+ - example: 's3://bucket-name/path/to/root'
1125
+ - example: 'gs://bucket-name/path/to/root'
1126
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1127
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1128
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1129
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1130
+ """
1131
+ ...
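A sketch of `with_artifact_store` applied at the flow level, assuming `type` and `config` are passed as keyword arguments as the parameter list above suggests; the bucket path is hypothetical:

```python
from metaflow import FlowSpec, step, with_artifact_store


@with_artifact_store(
    type="s3",
    config={"root": "s3://my-bucket/artifact-store"},  # hypothetical root path
)
class ArtifactStoreExampleFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ArtifactStoreExampleFlow()
```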
1132
+
1133
+ @typing.overload
1134
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1135
+ """
1136
+ Specifies the PyPI packages for all steps of the flow.
1137
+
1138
+ Use `@pypi_base` to set common packages required by all
1139
+ steps and use `@pypi` to specify step-specific overrides.
1140
+
1141
+ Parameters
1142
+ ----------
1143
+ packages : Dict[str, str], default: {}
1144
+ Packages to use for this flow. The key is the name of the package
1145
+ and the value is the version to use.
1146
+ python : str, optional, default: None
1147
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1148
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1149
+ """
1150
+ ...
1151
+
1152
+ @typing.overload
1153
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1154
+ ...
1155
+
1156
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1157
+ """
1158
+ Specifies the PyPI packages for all steps of the flow.
1159
+
1160
+ Use `@pypi_base` to set common packages required by all
1161
+ steps and use `@pypi` to specify step-specific overrides.
1162
+
1163
+ Parameters
1164
+ ----------
1165
+ packages : Dict[str, str], default: {}
1166
+ Packages to use for this flow. The key is the name of the package
1167
+ and the value is the version to use.
1168
+ python : str, optional, default: None
1169
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1170
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1171
+ """
1172
+ ...
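A minimal sketch of `@pypi_base` as documented above; the version pins are illustrative:

```python
from metaflow import FlowSpec, pypi_base, step


@pypi_base(packages={"requests": "2.31.0"}, python="3.11.5")
class PypiBaseExampleFlow(FlowSpec):

    @step
    def start(self):
        import requests
        print("requests", requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiBaseExampleFlow()
```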
1173
+
1174
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1175
+ """
1176
+ Specifies what flows belong to the same project.
1177
+
1178
+ A project-specific namespace is created for all flows that
1179
+ use the same `@project(name)`.
1180
+
1181
+
1182
+ Parameters
1183
+ ----------
1184
+ name : str
1185
+ Project name. Make sure that the name is unique amongst all
1186
+ projects that use the same production scheduler. The name may
1187
+ contain only lowercase alphanumeric characters and underscores.
1188
+
1189
+ branch : Optional[str], default None
1190
+ The branch to use. If not specified, the branch is set to
1191
+ `user.<username>` unless `production` is set to `True`. This can
1192
+ also be set on the command line using `--branch` as a top-level option.
1193
+ It is an error to specify `branch` in the decorator and on the command line.
1194
+
1195
+ production : bool, default False
1196
+ Whether or not the branch is the production branch. This can also be set on the
1197
+ command line using `--production` as a top-level option. It is an error to specify
1198
+ `production` in the decorator and on the command line.
1199
+ The project branch name will be:
1200
+ - if `branch` is specified:
1201
+ - if `production` is True: `prod.<branch>`
1202
+ - if `production` is False: `test.<branch>`
1203
+ - if `branch` is not specified:
1204
+ - if `production` is True: `prod`
1205
+ - if `production` is False: `user.<username>`
1206
+ """
1207
+ ...
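A minimal sketch of `@project` as documented above; the project name is hypothetical, and the branch/production options can also be supplied on the command line:

```python
from metaflow import FlowSpec, project, step


@project(name="fraud_detection")
class ProjectExampleFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectExampleFlow()
```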
1208
+
1209
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1210
+ """
1211
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1212
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1213
+
1214
+
1215
+ Parameters
1216
+ ----------
1217
+ timeout : int
1218
+ Time, in seconds before the task times out and fails. (Default: 3600)
1219
+ poke_interval : int
1220
+ Time in seconds that the job should wait in between each try. (Default: 60)
1221
+ mode : str
1222
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1223
+ exponential_backoff : bool
1224
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1225
+ pool : str
1226
+ The slot pool this task should run in;
1227
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1228
+ soft_fail : bool
1229
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1230
+ name : str
1231
+ Name of the sensor on Airflow
1232
+ description : str
1233
+ Description of sensor in the Airflow UI
1234
+ external_dag_id : str
1235
+ The dag_id that contains the task you want to wait for.
1236
+ external_task_ids : List[str]
1237
+ The list of task_ids that you want to wait for.
1238
+ If None (default value) the sensor waits for the DAG. (Default: None)
1239
+ allowed_states : List[str]
1240
+ Iterable of allowed states. (Default: ['success'])
1241
+ failed_states : List[str]
1242
+ Iterable of failed or disallowed states. (Default: None)
1243
+ execution_delta : datetime.timedelta
1244
+ Time difference with the previous execution to look at;
1245
+ the default is the same logical date as the current task or DAG. (Default: None)
1246
+ check_existence: bool
1247
+ Set to True to check if the external task exists or check if
1248
+ the DAG to wait for exists. (Default: True)
1249
+ """
1250
+ ...
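A sketch of `@airflow_external_task_sensor`, waiting on a hypothetical upstream DAG and assuming the remaining parameters fall back to the defaults noted above:

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step


# Blocks the `start` step until the hypothetical `nightly_etl` DAG has succeeded.
@airflow_external_task_sensor(
    name="wait_for_nightly_etl",
    external_dag_id="nightly_etl",
)
class DownstreamExampleFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamExampleFlow()
```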
1251
+
1252
+ @typing.overload
1253
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1254
+ """
1255
+ Specifies the event(s) that this flow depends on.
1256
+
1257
+ ```
1258
+ @trigger(event='foo')
1259
+ ```
1260
+ or
1261
+ ```
1262
+ @trigger(events=['foo', 'bar'])
1263
+ ```
1264
+
1265
+ Additionally, you can specify the parameter mappings
1266
+ to map event payload to Metaflow parameters for the flow.
1267
+ ```
1268
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1269
+ ```
1270
+ or
1271
+ ```
1272
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1273
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1274
+ ```
1275
+
1276
+ 'parameters' can also be a list of strings and tuples like so:
1277
+ ```
1278
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1279
+ ```
1280
+ This is equivalent to:
1281
+ ```
1282
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1283
+ ```
1305
1284
 
1306
- config: dict or Callable
1307
- Dictionary of configuration options for the datastore. The following keys are required:
1308
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1309
- - example: 's3://bucket-name/path/to/root'
1310
- - example: 'gs://bucket-name/path/to/root'
1311
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1312
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1313
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1314
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1285
+
1286
+ Parameters
1287
+ ----------
1288
+ event : Union[str, Dict[str, Any]], optional, default None
1289
+ Event dependency for this flow.
1290
+ events : List[Union[str, Dict[str, Any]]], default []
1291
+ Events dependency for this flow.
1292
+ options : Dict[str, Any], default {}
1293
+ Backend-specific configuration for tuning eventing behavior.
1315
1294
  """
1316
1295
  ...
1317
1296
 
1318
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1297
+ @typing.overload
1298
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1299
+ ...
1300
+
1301
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1319
1302
  """
1320
- Specifies what flows belong to the same project.
1303
+ Specifies the event(s) that this flow depends on.
1321
1304
 
1322
- A project-specific namespace is created for all flows that
1323
- use the same `@project(name)`.
1305
+ ```
1306
+ @trigger(event='foo')
1307
+ ```
1308
+ or
1309
+ ```
1310
+ @trigger(events=['foo', 'bar'])
1311
+ ```
1324
1312
 
1313
+ Additionally, you can specify the parameter mappings
1314
+ to map event payload to Metaflow parameters for the flow.
1315
+ ```
1316
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1317
+ ```
1318
+ or
1319
+ ```
1320
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1321
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1322
+ ```
1325
1323
 
1326
- Parameters
1327
- ----------
1328
- name : str
1329
- Project name. Make sure that the name is unique amongst all
1330
- projects that use the same production scheduler. The name may
1331
- contain only lowercase alphanumeric characters and underscores.
1324
+ 'parameters' can also be a list of strings and tuples like so:
1325
+ ```
1326
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1327
+ ```
1328
+ This is equivalent to:
1329
+ ```
1330
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1331
+ ```
1332
1332
 
1333
- branch : Optional[str], default None
1334
- The branch to use. If not specified, the branch is set to
1335
- `user.<username>` unless `production` is set to `True`. This can
1336
- also be set on the command line using `--branch` as a top-level option.
1337
- It is an error to specify `branch` in the decorator and on the command line.
1338
1333
 
1339
- production : bool, default False
1340
- Whether or not the branch is the production branch. This can also be set on the
1341
- command line using `--production` as a top-level option. It is an error to specify
1342
- `production` in the decorator and on the command line.
1343
- The project branch name will be:
1344
- - if `branch` is specified:
1345
- - if `production` is True: `prod.<branch>`
1346
- - if `production` is False: `test.<branch>`
1347
- - if `branch` is not specified:
1348
- - if `production` is True: `prod`
1349
- - if `production` is False: `user.<username>`
1334
+ Parameters
1335
+ ----------
1336
+ event : Union[str, Dict[str, Any]], optional, default None
1337
+ Event dependency for this flow.
1338
+ events : List[Union[str, Dict[str, Any]]], default []
1339
+ Events dependency for this flow.
1340
+ options : Dict[str, Any], default {}
1341
+ Backend-specific configuration for tuning eventing behavior.
1350
1342
  """
1351
1343
  ...
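A minimal sketch of an event-triggered flow using the parameter mapping described above; the event name and payload field are hypothetical:

```python
from metaflow import FlowSpec, Parameter, step, trigger


# Maps the 'customer_id' field of the hypothetical 'order_created' event
# onto the flow parameter 'customer'.
@trigger(event={"name": "order_created", "parameters": {"customer": "customer_id"}})
class EventDrivenExampleFlow(FlowSpec):

    customer = Parameter("customer", default="unknown")

    @step
    def start(self):
        print("Triggered for customer:", self.customer)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EventDrivenExampleFlow()
```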
1352
1344
 
1353
1345
  @typing.overload
1354
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1346
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1355
1347
  """
1356
- Specifies the times when the flow should be run when running on a
1357
- production scheduler.
1348
+ Specifies the Conda environment for all steps of the flow.
1349
+
1350
+ Use `@conda_base` to set common libraries required by all
1351
+ steps and use `@conda` to specify step-specific additions.
1358
1352
 
1359
1353
 
1360
1354
  Parameters
1361
1355
  ----------
1362
- hourly : bool, default False
1363
- Run the workflow hourly.
1364
- daily : bool, default True
1365
- Run the workflow daily.
1366
- weekly : bool, default False
1367
- Run the workflow weekly.
1368
- cron : str, optional, default None
1369
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1370
- specified by this expression.
1371
- timezone : str, optional, default None
1372
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1373
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1356
+ packages : Dict[str, str], default {}
1357
+ Packages to use for this flow. The key is the name of the package
1358
+ and the value is the version to use.
1359
+ libraries : Dict[str, str], default {}
1360
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1361
+ python : str, optional, default None
1362
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1363
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1364
+ disabled : bool, default False
1365
+ If set to True, disables Conda.
1374
1366
  """
1375
1367
  ...
1376
1368
 
1377
1369
  @typing.overload
1378
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1370
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1379
1371
  ...
1380
1372
 
1381
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1373
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1382
1374
  """
1383
- Specifies the times when the flow should be run when running on a
1384
- production scheduler.
1375
+ Specifies the Conda environment for all steps of the flow.
1376
+
1377
+ Use `@conda_base` to set common libraries required by all
1378
+ steps and use `@conda` to specify step-specific additions.
1385
1379
 
1386
1380
 
1387
1381
  Parameters
1388
1382
  ----------
1389
- hourly : bool, default False
1390
- Run the workflow hourly.
1391
- daily : bool, default True
1392
- Run the workflow daily.
1393
- weekly : bool, default False
1394
- Run the workflow weekly.
1395
- cron : str, optional, default None
1396
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1397
- specified by this expression.
1398
- timezone : str, optional, default None
1399
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1400
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1383
+ packages : Dict[str, str], default {}
1384
+ Packages to use for this flow. The key is the name of the package
1385
+ and the value is the version to use.
1386
+ libraries : Dict[str, str], default {}
1387
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1388
+ python : str, optional, default None
1389
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1390
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1391
+ disabled : bool, default False
1392
+ If set to True, disables Conda.
1401
1393
  """
1402
1394
  ...
1403
1395
 
@@ -1502,6 +1494,57 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1502
1494
  """
1503
1495
  ...
1504
1496
 
1497
+ @typing.overload
1498
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1499
+ """
1500
+ Specifies the times when the flow should be run when running on a
1501
+ production scheduler.
1502
+
1503
+
1504
+ Parameters
1505
+ ----------
1506
+ hourly : bool, default False
1507
+ Run the workflow hourly.
1508
+ daily : bool, default True
1509
+ Run the workflow daily.
1510
+ weekly : bool, default False
1511
+ Run the workflow weekly.
1512
+ cron : str, optional, default None
1513
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1514
+ specified by this expression.
1515
+ timezone : str, optional, default None
1516
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1517
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1518
+ """
1519
+ ...
1520
+
1521
+ @typing.overload
1522
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1523
+ ...
1524
+
1525
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1526
+ """
1527
+ Specifies the times when the flow should be run when running on a
1528
+ production scheduler.
1529
+
1530
+
1531
+ Parameters
1532
+ ----------
1533
+ hourly : bool, default False
1534
+ Run the workflow hourly.
1535
+ daily : bool, default True
1536
+ Run the workflow daily.
1537
+ weekly : bool, default False
1538
+ Run the workflow weekly.
1539
+ cron : str, optional, default None
1540
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1541
+ specified by this expression.
1542
+ timezone : str, optional, default None
1543
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1544
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1545
+ """
1546
+ ...
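A minimal sketch of `@schedule` as documented above; the cron string follows the AWS-style format linked in the docstring:

```python
from metaflow import FlowSpec, schedule, step


# Runs daily at 06:00 when deployed to a production scheduler.
@schedule(cron="0 6 * * ? *")
class NightlyExampleFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyExampleFlow()
```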
1547
+
1505
1548
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1506
1549
  """
1507
1550
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1545,48 +1588,5 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1545
1588
  """
1546
1589
  ...
1547
1590
 
1548
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1549
- """
1550
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1551
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1552
-
1553
-
1554
- Parameters
1555
- ----------
1556
- timeout : int
1557
- Time, in seconds before the task times out and fails. (Default: 3600)
1558
- poke_interval : int
1559
- Time in seconds that the job should wait in between each try. (Default: 60)
1560
- mode : str
1561
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1562
- exponential_backoff : bool
1563
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1564
- pool : str
1565
- the slot pool this task should run in,
1566
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1567
- soft_fail : bool
1568
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1569
- name : str
1570
- Name of the sensor on Airflow
1571
- description : str
1572
- Description of sensor in the Airflow UI
1573
- external_dag_id : str
1574
- The dag_id that contains the task you want to wait for.
1575
- external_task_ids : List[str]
1576
- The list of task_ids that you want to wait for.
1577
- If None (default value) the sensor waits for the DAG. (Default: None)
1578
- allowed_states : List[str]
1579
- Iterable of allowed states, (Default: ['success'])
1580
- failed_states : List[str]
1581
- Iterable of failed or dis-allowed states. (Default: None)
1582
- execution_delta : datetime.timedelta
1583
- time difference with the previous execution to look at,
1584
- the default is the same logical date as the current task or DAG. (Default: None)
1585
- check_existence: bool
1586
- Set to True to check if the external task exists or check if
1587
- the DAG to wait for exists. (Default: True)
1588
- """
1589
- ...
1590
-
1591
1591
  pkg_name: str
1592
1592