ob-metaflow-stubs 6.0.3.180rc3__py2.py3-none-any.whl → 6.0.3.180rc5__py2.py3-none-any.whl

This diff shows the contents of two package versions that have been publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Files changed (220)
  1. metaflow-stubs/__init__.pyi +679 -679
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +1 -1
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +86 -86
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +2 -2
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +1 -1
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  92. metaflow-stubs/multicore_utils.pyi +1 -1
  93. metaflow-stubs/ob_internal.pyi +1 -1
  94. metaflow-stubs/parameters.pyi +3 -3
  95. metaflow-stubs/plugins/__init__.pyi +12 -12
  96. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  98. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  99. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  100. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  101. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  102. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  103. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  104. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  105. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  106. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  107. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  109. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  110. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  111. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  113. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  115. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  116. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +1 -1
  119. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  120. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  121. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  122. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  123. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  124. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  125. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  126. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  128. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  129. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +1 -1
  130. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  131. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  132. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  133. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  135. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  138. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  140. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  141. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  142. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  143. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  144. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  145. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  146. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  147. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  148. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  149. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  151. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  152. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  153. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  154. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  155. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  156. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  157. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  158. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  159. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  161. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  162. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +1 -1
  163. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  164. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  165. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  166. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  169. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  170. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  171. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  173. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  174. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  175. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/perimeters.pyi +1 -1
  177. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  179. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  181. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  182. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  184. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  185. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  186. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  187. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  188. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +1 -1
  189. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  192. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  193. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  194. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  196. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  197. metaflow-stubs/profilers/__init__.pyi +1 -1
  198. metaflow-stubs/pylint_wrapper.pyi +1 -1
  199. metaflow-stubs/runner/__init__.pyi +1 -1
  200. metaflow-stubs/runner/deployer.pyi +29 -29
  201. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  202. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  203. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  204. metaflow-stubs/runner/nbrun.pyi +1 -1
  205. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  206. metaflow-stubs/runner/utils.pyi +2 -2
  207. metaflow-stubs/system/__init__.pyi +1 -1
  208. metaflow-stubs/system/system_logger.pyi +1 -1
  209. metaflow-stubs/system/system_monitor.pyi +1 -1
  210. metaflow-stubs/tagging_util.pyi +1 -1
  211. metaflow-stubs/tuple_util.pyi +1 -1
  212. metaflow-stubs/user_configs/__init__.pyi +1 -1
  213. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  214. metaflow-stubs/user_configs/config_options.pyi +2 -2
  215. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  216. {ob_metaflow_stubs-6.0.3.180rc3.dist-info → ob_metaflow_stubs-6.0.3.180rc5.dist-info}/METADATA +1 -1
  217. ob_metaflow_stubs-6.0.3.180rc5.dist-info/RECORD +220 -0
  218. ob_metaflow_stubs-6.0.3.180rc3.dist-info/RECORD +0 -220
  219. {ob_metaflow_stubs-6.0.3.180rc3.dist-info → ob_metaflow_stubs-6.0.3.180rc5.dist-info}/WHEEL +0 -0
  220. {ob_metaflow_stubs-6.0.3.180rc3.dist-info → ob_metaflow_stubs-6.0.3.180rc5.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-06-17T10:52:54.470733 #
+ # Generated on 2025-06-17T20:32:02.265213 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -35,10 +35,10 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import cards as cards
  from . import tuple_util as tuple_util
- from . import metaflow_git as metaflow_git
+ from . import cards as cards
  from . import events as events
+ from . import metaflow_git as metaflow_git
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
@@ -155,46 +155,54 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

- User code call
- --------------
- @ollama(
- models=[...],
- ...
- )

- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

- Valid model options
- -------------------
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on DGX cloud.


  Parameters
  ----------
- models: list[str]
- List of Ollama containers running models in sidecars.
- backend: str
- Determines where and how to run the Ollama process.
- force_pull: bool
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
- cache_update_policy: str
- Cache update policy: "auto", "force", or "never".
- force_cache_update: bool
- Simple override for "force" cache update policy.
- debug: bool
- Whether to turn on verbose debugging logs.
- circuit_breaker_config: dict
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
- timeout_config: dict
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ queue_timeout : int
+ Time to keep the job in NVCF's queue.
  """
  ...

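For orientation on the decorators reshuffled in this hunk: rc5 moves the @secrets overloads ahead of the @nvidia stub. A minimal sketch of how these step decorators are typically applied, assuming the top-level names this __init__.pyi exposes (the flow name, secret spec, and GPU values below are illustrative and not taken from the diff):

    from metaflow import FlowSpec, step, secrets, nvidia

    class TrainFlow(FlowSpec):  # hypothetical flow, for illustration only

        @secrets(sources=["my-secret-spec"])                  # placeholder spec; values injected as env vars before the step runs
        @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)   # example values; runs the step on DGX cloud
        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        TrainFlow()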
@@ -306,87 +314,147 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies that this step should execute on Kubernetes.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ image_pull_secrets: List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+ Kubernetes image pull secrets to use when pulling container images
+ in Kubernetes.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort

- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the PyPI packages for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

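This hunk documents @kubernetes and @retry in this position. A minimal sketch of combining the two on one step, assuming the defaults shown in the signatures above (the resource sizes and flow name are examples only, not prescribed by the stubs):

    from metaflow import FlowSpec, step, kubernetes, retry

    class ETLFlow(FlowSpec):  # hypothetical flow, for illustration only

        @kubernetes(cpu=2, memory=8192, disk=10240)  # example sizes; image, namespace, etc. fall back to config defaults
        @retry(times=3, minutes_between_retries=2)   # retries transient failures before any @catch handling
        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        ETLFlow()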
@@ -450,61 +518,7 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies the Conda environment for the step.

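The @conda signature carried over at the end of this hunk keeps its rc3 shape. A minimal sketch of the step-level usage it describes (the package pin and flow name are illustrative; python=None keeps the interpreter version used to start the run):

    from metaflow import FlowSpec, step, conda

    class CondaFlow(FlowSpec):  # hypothetical flow, for illustration only

        @conda(packages={"numpy": "1.26.4"}, python=None, disabled=False)  # example pin
        @step
        def start(self):
            import numpy as np                        # resolved inside the step's Conda environment
            self.mean = float(np.mean([1, 2, 3]))
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        CondaFlow()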
@@ -563,114 +577,51 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Enables checkpointing for a step.
-
-
-
- Parameters
- ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
- """
- ...
-
- @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
- """
- Enables checkpointing for a step.
-
-
-
- Parameters
- ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
- """
- ...
-
- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

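This hunk drops the @checkpoint and duplicate @retry stubs from this position and documents @card. A minimal sketch of the documented usage, assuming the standard metaflow.cards components (the flow name and card content are illustrative):

    from metaflow import FlowSpec, step, card, current
    from metaflow.cards import Markdown

    class ReportFlow(FlowSpec):  # hypothetical flow, for illustration only

        @card(type="default", timeout=45)  # renders a card after the step completes
        @step
        def start(self):
            current.card.append(Markdown("# Example report"))  # example content added to the card
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        ReportFlow()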
@@ -688,83 +639,36 @@ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Cal
  """
  ...

- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- temp_dir_root : str, optional
- The root directory that will hold the temporary directory where objects will be downloaded.
-
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
- The list of repos (models/datasets) to load.
-
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
-
- - If repo (model/dataset) is not found in the datastore:
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
-
- - If repo is found in the datastore:
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Internal decorator to support Fast bakery
- """
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Internal decorator to support Fast bakery
- """
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- This decorator is used to run vllm APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @vllm(
- model="...",
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
-
- Valid model options
- -------------------
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
-
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
- If you need multiple models, you must create multiple @vllm decorators.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- model: str
- HuggingFace model identifier to be served by vLLM.
- backend: str
- Determines where and how to run the vLLM process.
- debug: bool
- Whether to turn on verbose debugging logs.
- kwargs : Any
- Any other keyword arguments are passed directly to the vLLM engine.
- This allows for flexible configuration of vLLM server settings.
- For example, `tensor_parallel_size=2`.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

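With the @huggingface_hub, @fast_bakery_internal, and @vllm stubs moved out of this position, the hunk leaves the @environment overloads. A minimal sketch of the usage they describe (the variable name and value below are illustrative):

    import os

    from metaflow import FlowSpec, step, environment

    class EnvFlow(FlowSpec):  # hypothetical flow, for illustration only

        @environment(vars={"TOKENIZERS_PARALLELISM": "false"})  # example variable; set before the step executes
        @step
        def start(self):
            print(os.environ["TOKENIZERS_PARALLELISM"])
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        EnvFlow()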
@@ -847,499 +751,579 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
847
751
  """
848
752
  ...
849
753
 
850
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
754
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
851
755
  """
852
- Specifies that this step should execute on Kubernetes.
756
+ Specifies that this step is used to deploy an instance of the app.
757
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
853
758
 
854
759
 
855
760
  Parameters
856
761
  ----------
857
- cpu : int, default 1
858
- Number of CPUs required for this step. If `@resources` is
859
- also present, the maximum value from all decorators is used.
860
- memory : int, default 4096
861
- Memory size (in MB) required for this step. If
862
- `@resources` is also present, the maximum value from all decorators is
863
- used.
864
- disk : int, default 10240
865
- Disk size (in MB) required for this step. If
866
- `@resources` is also present, the maximum value from all decorators is
867
- used.
868
- image : str, optional, default None
869
- Docker image to use when launching on Kubernetes. If not specified, and
870
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
871
- not, a default Docker image mapping to the current version of Python is used.
872
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
873
- If given, the imagePullPolicy to be applied to the Docker image of the step.
874
- image_pull_secrets: List[str], default []
875
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
876
- Kubernetes image pull secrets to use when pulling container images
877
- in Kubernetes.
878
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
879
- Kubernetes service account to use when launching pod in Kubernetes.
880
- secrets : List[str], optional, default None
881
- Kubernetes secrets to use when launching pod in Kubernetes. These
882
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
883
- in Metaflow configuration.
884
- node_selector: Union[Dict[str,str], str], optional, default None
885
- Kubernetes node selector(s) to apply to the pod running the task.
886
- Can be passed in as a comma separated string of values e.g.
887
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
888
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
889
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
890
- Kubernetes namespace to use when launching pod in Kubernetes.
891
- gpu : int, optional, default None
892
- Number of GPUs required for this step. A value of zero implies that
893
- the scheduled node should not have GPUs.
894
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
895
- The vendor of the GPUs to be used for this step.
896
- tolerations : List[str], default []
897
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
898
- Kubernetes tolerations to use when launching pod in Kubernetes.
899
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
900
- Kubernetes labels to use when launching pod in Kubernetes.
901
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
902
- Kubernetes annotations to use when launching pod in Kubernetes.
903
- use_tmpfs : bool, default False
904
- This enables an explicit tmpfs mount for this step.
905
- tmpfs_tempdir : bool, default True
906
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
907
- tmpfs_size : int, optional, default: None
908
- The value for the size (in MiB) of the tmpfs mount for this step.
909
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
910
- memory allocated for this step.
911
- tmpfs_path : str, optional, default /metaflow_temp
912
- Path to tmpfs mount for this step.
913
- persistent_volume_claims : Dict[str, str], optional, default None
914
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
915
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
916
- shared_memory: int, optional
917
- Shared memory size (in MiB) required for this step
918
- port: int, optional
919
- Port number to specify in the Kubernetes job object
920
- compute_pool : str, optional, default None
921
- Compute pool to be used for for this step.
922
- If not specified, any accessible compute pool within the perimeter is used.
923
- hostname_resolution_timeout: int, default 10 * 60
924
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
925
- Only applicable when @parallel is used.
926
- qos: str, default: Burstable
927
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
762
+ app_port : int
763
+ Port number on which the deployed app listens.
764
+ app_name : str
765
+ Name of the app to deploy.
766
+ """
767
+ ...
768
+
769
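Editor's note: a minimal usage sketch for the `@app_deploy` step decorator added above. The flow, entrypoint command, and deploy directory values are hypothetical; the docstring only states that `self.app_name`, `self.app_port`, `self.entrypoint`, and `self.deployDir` must be set.

```python
# Hypothetical sketch; attribute values are placeholders, not documented defaults.
from metaflow import FlowSpec, step, app_deploy

class AppDeployFlow(FlowSpec):

    @step
    def start(self):
        # The @app_deploy docstring requires these attributes to be set.
        self.app_name = "demo-app"
        self.app_port = 8080
        self.entrypoint = "python serve.py"  # hypothetical entrypoint command
        self.deployDir = "./app"             # hypothetical deploy directory
        self.next(self.deploy)

    @app_deploy(app_port=8080, app_name="demo-app")
    @step
    def deploy(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    AppDeployFlow()
```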
+ @typing.overload
770
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
771
+ """
772
+ Decorator prototype for all step decorators. This function gets specialized
773
+ and imported for all decorator types by _import_plugin_decorators().
774
+ """
775
+ ...
776
+
777
+ @typing.overload
778
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
779
+ ...
780
+
781
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
782
+ """
783
+ Decorator prototype for all step decorators. This function gets specialized
784
+ and imported for all decorator types by _import_plugin_decorators().
785
+ """
786
+ ...
787
+
788
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
789
+ """
790
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
928
791
 
929
- security_context: Dict[str, Any], optional, default None
930
- Container security context. Applies to the task container. Allows the following keys:
931
- - privileged: bool, optional, default None
932
- - allow_privilege_escalation: bool, optional, default None
933
- - run_as_user: int, optional, default None
934
- - run_as_group: int, optional, default None
935
- - run_as_non_root: bool, optional, default None
792
+ User code call
793
+ --------------
794
+ @ollama(
795
+ models=[...],
796
+ ...
797
+ )
798
+
799
+ Valid backend options
800
+ ---------------------
801
+ - 'local': Run as a separate process on the local task machine.
802
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
803
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
804
+
805
+ Valid model options
806
+ -------------------
807
+ Any model listed at https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
808
+
809
+
810
+ Parameters
811
+ ----------
812
+ models: list[str]
813
+ List of Ollama containers running models in sidecars.
814
+ backend: str
815
+ Determines where and how to run the Ollama process.
816
+ force_pull: bool
817
+ Whether to always run `ollama pull`, or to first check the remote cache in the Metaflow datastore for this model key.
818
+ cache_update_policy: str
819
+ Cache update policy: "auto", "force", or "never".
820
+ force_cache_update: bool
821
+ Simple override for "force" cache update policy.
822
+ debug: bool
823
+ Whether to turn on verbose debugging logs.
824
+ circuit_breaker_config: dict
825
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
826
+ timeout_config: dict
827
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
936
828
  """
937
829
  ...
938
830
 
939
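Editor's note: a hedged sketch of the `@ollama` sidecar decorator documented above. The stub lists every keyword without defaults, so placeholder values are passed explicitly; the actual runtime defaults are not documented here.

```python
# Illustrative values only; the empty config dicts are placeholders.
from metaflow import FlowSpec, step, ollama

class OllamaSidecarFlow(FlowSpec):

    @ollama(
        models=["llama3.2"],         # any model from https://ollama.com/search
        backend="local",             # 'local' is the only non-TODO backend listed
        force_pull=False,
        cache_update_policy="auto",  # "auto" | "force" | "never"
        force_cache_update=False,
        debug=False,
        circuit_breaker_config={},   # keys: failure_threshold, recovery_timeout, reset_timeout
        timeout_config={},           # keys: pull, stop, health_check, install, server_startup
    )
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    OllamaSidecarFlow()
```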
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
831
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
940
832
  """
941
- Specifies that this step is used to deploy an instance of the app.
942
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
833
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
943
834
 
944
835
 
945
836
  Parameters
946
837
  ----------
947
- app_port : int
948
- Number of GPUs to use.
949
- app_name : str
950
- Name of the app to deploy.
838
+ temp_dir_root : str, optional
839
+ The root directory that will hold the temporary directory where objects will be downloaded.
840
+
841
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
842
+ The list of repos (models/datasets) to load.
843
+
844
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
845
+
846
+ - If repo (model/dataset) is not found in the datastore:
847
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
848
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
849
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
850
+
851
+ - If repo is found in the datastore:
852
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
951
853
  """
952
854
  ...
953
855
 
954
856
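Editor's note: a small sketch of the `@huggingface_hub` decorator described above. The repo id is a placeholder, and indexing `current.huggingface_hub.loaded` by repo id is an assumption beyond the one-line docstring.

```python
from metaflow import FlowSpec, step, huggingface_hub, current

class HFCacheFlow(FlowSpec):

    @huggingface_hub(load=["bert-base-uncased"])  # placeholder repo id
    @step
    def start(self):
        # The docstring exposes loaded repos via current.huggingface_hub.loaded;
        # per-repo indexing as used here is assumed, not documented above.
        local_path = current.huggingface_hub.loaded["bert-base-uncased"]
        print("repo available at", local_path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    HFCacheFlow()
```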
  @typing.overload
955
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
857
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
956
858
  """
957
- Creates a human-readable report, a Metaflow Card, after this step completes.
859
+ Enables checkpointing for a step.
958
860
 
959
- Note that you may add multiple `@card` decorators in a step with different parameters.
960
861
 
961
862
 
962
863
  Parameters
963
864
  ----------
964
- type : str, default 'default'
965
- Card type.
966
- id : str, optional, default None
967
- If multiple cards are present, use this id to identify this card.
968
- options : Dict[str, Any], default {}
969
- Options passed to the card. The contents depend on the card type.
970
- timeout : int, default 45
971
- Interrupt reporting if it takes more than this many seconds.
865
+ load_policy : str, default: "fresh"
866
+ The policy for loading the checkpoint. The following policies are supported:
867
+ - "eager": Loads the the latest available checkpoint within the namespace.
868
+ With this mode, the latest checkpoint written by any previous task of the step (even from a different run)
869
+ will be loaded at the start of the task.
870
+ - "none": Do not load any checkpoint
871
+ - "fresh": Loads the lastest checkpoint created within the running Task.
872
+ This mode helps load checkpoints across retry attempts of the same task.
873
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
874
+ created within the task will be loaded when the task retries execution after a failure.
875
+
876
+ temp_dir_root : str, default: None
877
+ The root directory under which `current.checkpoint.directory` will be created.
972
878
  """
973
879
  ...
974
880
 
975
881
  @typing.overload
976
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
882
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
977
883
  ...
978
884
 
979
885
  @typing.overload
980
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
886
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
981
887
  ...
982
888
 
983
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
889
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
984
890
  """
985
- Creates a human-readable report, a Metaflow Card, after this step completes.
891
+ Enables checkpointing for a step.
986
892
 
987
- Note that you may add multiple `@card` decorators in a step with different parameters.
988
893
 
989
894
 
990
895
  Parameters
991
896
  ----------
992
- type : str, default 'default'
993
- Card type.
994
- id : str, optional, default None
995
- If multiple cards are present, use this id to identify this card.
996
- options : Dict[str, Any], default {}
997
- Options passed to the card. The contents depend on the card type.
998
- timeout : int, default 45
999
- Interrupt reporting if it takes more than this many seconds.
897
+ load_policy : str, default: "fresh"
898
+ The policy for loading the checkpoint. The following policies are supported:
899
+ - "eager": Loads the the latest available checkpoint within the namespace.
900
+ With this mode, the latest checkpoint written by any previous task of the step (even from a different run)
901
+ will be loaded at the start of the task.
902
+ - "none": Do not load any checkpoint
903
+ - "fresh": Loads the lastest checkpoint created within the running Task.
904
+ This mode helps load checkpoints across retry attempts of the same task.
905
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
906
+ created within the task will be loaded when the task retries execution after a failure.
907
+
908
+ temp_dir_root : str, default: None
909
+ The root directory under which `current.checkpoint.directory` will be created.
1000
910
  """
1001
911
  ...
1002
912
 
1003
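Editor's note: a minimal sketch of the `@checkpoint` decorator documented above, assuming only what the docstring states (the task gets a `current.checkpoint.directory`); writing a marker file into it is illustrative, not the official checkpoint API.

```python
import os
from metaflow import FlowSpec, step, checkpoint, current

class CheckpointedFlow(FlowSpec):

    @checkpoint(load_policy="fresh")
    @step
    def start(self):
        # current.checkpoint.directory is documented above; the marker file is
        # only an illustration of using that scratch directory.
        marker = os.path.join(current.checkpoint.directory, "progress.txt")
        with open(marker, "w") as f:
            f.write("epoch=1")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CheckpointedFlow()
```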
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
913
+ @typing.overload
914
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1004
915
  """
1005
- Specifies that this step should execute on DGX cloud.
916
+ Specifies the PyPI packages for the step.
917
+
918
+ Information in this decorator will augment any
919
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
920
+ you can use `@pypi_base` to set packages required by all
921
+ steps and use `@pypi` to specify step-specific overrides.
1006
922
 
1007
923
 
1008
924
  Parameters
1009
925
  ----------
1010
- gpu : int
1011
- Number of GPUs to use.
1012
- gpu_type : str
1013
- Type of Nvidia GPU to use.
1014
- queue_timeout : int
1015
- Time to keep the job in NVCF's queue.
926
+ packages : Dict[str, str], default: {}
927
+ Packages to use for this step. The key is the name of the package
928
+ and the value is the version to use.
929
+ python : str, optional, default: None
930
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
931
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1016
932
  """
1017
933
  ...
1018
934
 
1019
935
  @typing.overload
1020
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
936
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
937
+ ...
938
+
939
+ @typing.overload
940
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
941
+ ...
942
+
943
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1021
944
  """
1022
- Specifies the Conda environment for all steps of the flow.
945
+ Specifies the PyPI packages for the step.
1023
946
 
1024
- Use `@conda_base` to set common libraries required by all
1025
- steps and use `@conda` to specify step-specific additions.
947
+ Information in this decorator will augment any
948
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
949
+ you can use `@pypi_base` to set packages required by all
950
+ steps and use `@pypi` to specify step-specific overrides.
1026
951
 
1027
952
 
1028
953
  Parameters
1029
954
  ----------
1030
- packages : Dict[str, str], default {}
955
+ packages : Dict[str, str], default: {}
956
+ Packages to use for this step. The key is the name of the package
957
+ and the value is the version to use.
958
+ python : str, optional, default: None
959
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
960
+ that the version used will correspond to the version of the Python interpreter used to start the run.
961
+ """
962
+ ...
963
+
964
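Editor's note: a sketch combining `@pypi_base` and `@pypi` as the docstring above suggests; the package and Python pins are examples only.

```python
from metaflow import FlowSpec, step, pypi, pypi_base

@pypi_base(packages={"requests": "2.31.0"}, python="3.11.0")  # example pins
class PypiFlow(FlowSpec):

    @pypi(packages={"pandas": "2.2.2"})  # step-specific override, example pin
    @step
    def start(self):
        import pandas  # resolved inside the @pypi-managed environment
        print(pandas.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiFlow()
```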
+ def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
965
+ """
966
+ This decorator is used to run vLLM APIs as Metaflow task sidecars.
967
+
968
+ User code call
969
+ --------------
970
+ @vllm(
971
+ model="...",
972
+ ...
973
+ )
974
+
975
+ Valid backend options
976
+ ---------------------
977
+ - 'local': Run as a separate process on the local task machine.
978
+
979
+ Valid model options
980
+ -------------------
981
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
982
+
983
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
984
+ If you need multiple models, you must create multiple @vllm decorators.
985
+
986
+
987
+ Parameters
988
+ ----------
989
+ model: str
990
+ HuggingFace model identifier to be served by vLLM.
991
+ backend: str
992
+ Determines where and how to run the vLLM process.
993
+ debug: bool
994
+ Whether to turn on verbose debugging logs.
995
+ kwargs : Any
996
+ Any other keyword arguments are passed directly to the vLLM engine.
997
+ This allows for flexible configuration of vLLM server settings.
998
+ For example, `tensor_parallel_size=2`.
999
+ """
1000
+ ...
1001
+
1002
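Editor's note: a hedged sketch of the `@vllm` decorator above; the model id is an example, and extra engine keyword arguments are omitted because only their general handling is described in the docstring.

```python
from metaflow import FlowSpec, step, vllm

class VLLMServingFlow(FlowSpec):

    # One @vllm decorator serves exactly one model, per the note above.
    @vllm(model="meta-llama/Llama-3.2-1B", backend="local", debug=False)
    @step
    def start(self):
        # Engine options such as tensor_parallel_size=2 could be passed as extra
        # keyword arguments per the docstring; they are omitted in this sketch.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    VLLMServingFlow()
```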
+ @typing.overload
1003
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1004
+ """
1005
+ Internal decorator to support Fast bakery
1006
+ """
1007
+ ...
1008
+
1009
+ @typing.overload
1010
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1011
+ ...
1012
+
1013
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1014
+ """
1015
+ Internal decorator to support Fast bakery
1016
+ """
1017
+ ...
1018
+
1019
+ @typing.overload
1020
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1021
+ """
1022
+ Specifies the PyPI packages for all steps of the flow.
1023
+
1024
+ Use `@pypi_base` to set common packages required by all
1025
+ steps and use `@pypi` to specify step-specific overrides.
1026
+
1027
+ Parameters
1028
+ ----------
1029
+ packages : Dict[str, str], default: {}
1031
1030
  Packages to use for this flow. The key is the name of the package
1032
1031
  and the value is the version to use.
1033
- libraries : Dict[str, str], default {}
1034
- Supported for backward compatibility. When used with packages, packages will take precedence.
1035
- python : str, optional, default None
1032
+ python : str, optional, default: None
1033
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1034
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1035
+ """
1036
+ ...
1037
+
1038
+ @typing.overload
1039
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1040
+ ...
1041
+
1042
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1043
+ """
1044
+ Specifies the PyPI packages for all steps of the flow.
1045
+
1046
+ Use `@pypi_base` to set common packages required by all
1047
+ steps and use `@pypi` to specify step-specific overrides.
1048
+
1049
+ Parameters
1050
+ ----------
1051
+ packages : Dict[str, str], default: {}
1052
+ Packages to use for this flow. The key is the name of the package
1053
+ and the value is the version to use.
1054
+ python : str, optional, default: None
1036
1055
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1037
1056
  that the version used will correspond to the version of the Python interpreter used to start the run.
1038
- disabled : bool, default False
1039
- If set to True, disables Conda.
1040
1057
  """
1041
1058
  ...
1042
1059
 
1043
- @typing.overload
1044
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1060
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1061
+ """
1062
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1063
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1064
+
1065
+
1066
+ Parameters
1067
+ ----------
1068
+ timeout : int
1069
+ Time, in seconds before the task times out and fails. (Default: 3600)
1070
+ poke_interval : int
1071
+ Time in seconds that the job should wait in between each try. (Default: 60)
1072
+ mode : str
1073
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1074
+ exponential_backoff : bool
1075
+ Allows progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1076
+ pool : str
1077
+ The slot pool this task should run in;
1078
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1079
+ soft_fail : bool
1080
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
1081
+ name : str
1082
+ Name of the sensor on Airflow
1083
+ description : str
1084
+ Description of sensor in the Airflow UI
1085
+ external_dag_id : str
1086
+ The dag_id that contains the task you want to wait for.
1087
+ external_task_ids : List[str]
1088
+ The list of task_ids that you want to wait for.
1089
+ If None (default value) the sensor waits for the DAG. (Default: None)
1090
+ allowed_states : List[str]
1091
+ Iterable of allowed states, (Default: ['success'])
1092
+ failed_states : List[str]
1093
+ Iterable of failed or dis-allowed states. (Default: None)
1094
+ execution_delta : datetime.timedelta
1095
+ time difference with the previous execution to look at,
1096
+ the default is the same logical date as the current task or DAG. (Default: None)
1097
+ check_existence: bool
1098
+ Set to True to check if the external task exists or check if
1099
+ the DAG to wait for exists. (Default: True)
1100
+ """
1045
1101
  ...
1046
1102
 
1047
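Editor's note: an illustrative flow-level use of `@airflow_external_task_sensor`; the upstream DAG id is hypothetical and the remaining values simply restate the documented defaults.

```python
from metaflow import FlowSpec, step, airflow_external_task_sensor

@airflow_external_task_sensor(
    name="wait_for_upstream_dag",
    description="Block start until the upstream DAG run finishes",
    external_dag_id="upstream_etl",   # hypothetical upstream DAG id
    external_task_ids=None,           # None waits for the whole DAG
    allowed_states=["success"],
    failed_states=None,
    execution_delta=None,
    check_existence=True,
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()
```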
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1103
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1048
1104
  """
1049
- Specifies the Conda environment for all steps of the flow.
1105
+ Specifies what flows belong to the same project.
1050
1106
 
1051
- Use `@conda_base` to set common libraries required by all
1052
- steps and use `@conda` to specify step-specific additions.
1107
+ A project-specific namespace is created for all flows that
1108
+ use the same `@project(name)`.
1053
1109
 
1054
1110
 
1055
1111
  Parameters
1056
1112
  ----------
1057
- packages : Dict[str, str], default {}
1058
- Packages to use for this flow. The key is the name of the package
1059
- and the value is the version to use.
1060
- libraries : Dict[str, str], default {}
1061
- Supported for backward compatibility. When used with packages, packages will take precedence.
1062
- python : str, optional, default None
1063
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1064
- that the version used will correspond to the version of the Python interpreter used to start the run.
1065
- disabled : bool, default False
1066
- If set to True, disables Conda.
1113
+ name : str
1114
+ Project name. Make sure that the name is unique amongst all
1115
+ projects that use the same production scheduler. The name may
1116
+ contain only lowercase alphanumeric characters and underscores.
1117
+
1118
+ branch : Optional[str], default None
1119
+ The branch to use. If not specified, the branch is set to
1120
+ `user.<username>` unless `production` is set to `True`. This can
1121
+ also be set on the command line using `--branch` as a top-level option.
1122
+ It is an error to specify `branch` in the decorator and on the command line.
1123
+
1124
+ production : bool, default False
1125
+ Whether or not the branch is the production branch. This can also be set on the
1126
+ command line using `--production` as a top-level option. It is an error to specify
1127
+ `production` in the decorator and on the command line.
1128
+ The project branch name will be:
1129
+ - if `branch` is specified:
1130
+ - if `production` is True: `prod.<branch>`
1131
+ - if `production` is False: `test.<branch>`
1132
+ - if `branch` is not specified:
1133
+ - if `production` is True: `prod`
1134
+ - if `production` is False: `user.<username>`
1067
1135
  """
1068
1136
  ...
1069
1137
 
1070
1138
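Editor's note: a minimal `@project` sketch; the project name is illustrative.

```python
from metaflow import FlowSpec, step, project

@project(name="fraud_detection")  # project name is illustrative
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScoringFlow()
```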
  @typing.overload
1071
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1139
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1072
1140
  """
1073
- Specifies the event(s) that this flow depends on.
1141
+ Specifies the flow(s) that this flow depends on.
1074
1142
 
1075
1143
  ```
1076
- @trigger(event='foo')
1144
+ @trigger_on_finish(flow='FooFlow')
1077
1145
  ```
1078
1146
  or
1079
1147
  ```
1080
- @trigger(events=['foo', 'bar'])
1148
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1081
1149
  ```
1150
+ This decorator respects the @project decorator and triggers the flow
1151
+ when upstream runs within the same namespace complete successfully.
1082
1152
 
1083
- Additionally, you can specify the parameter mappings
1084
- to map event payload to Metaflow parameters for the flow.
1153
+ Additionally, you can specify project-aware upstream flow dependencies
1154
+ by specifying the fully qualified project_flow_name.
1085
1155
  ```
1086
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1156
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1087
1157
  ```
1088
1158
  or
1089
1159
  ```
1090
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1091
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1160
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1092
1161
  ```
1093
1162
 
1094
- 'parameters' can also be a list of strings and tuples like so:
1095
- ```
1096
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1097
- ```
1098
- This is equivalent to:
1163
+ You can also specify just the project or project branch (other values will be
1164
+ inferred from the current project or project branch):
1099
1165
  ```
1100
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1166
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1101
1167
  ```
1102
1168
 
1169
+ Note that `branch` is typically one of:
1170
+ - `prod`
1171
+ - `user.bob`
1172
+ - `test.my_experiment`
1173
+ - `prod.staging`
1174
+
1103
1175
 
1104
1176
  Parameters
1105
1177
  ----------
1106
- event : Union[str, Dict[str, Any]], optional, default None
1107
- Event dependency for this flow.
1108
- events : List[Union[str, Dict[str, Any]]], default []
1109
- Events dependency for this flow.
1178
+ flow : Union[str, Dict[str, str]], optional, default None
1179
+ Upstream flow dependency for this flow.
1180
+ flows : List[Union[str, Dict[str, str]]], default []
1181
+ Upstream flow dependencies for this flow.
1110
1182
  options : Dict[str, Any], default {}
1111
1183
  Backend-specific configuration for tuning eventing behavior.
1112
1184
  """
1113
1185
  ...
1114
1186
 
1115
1187
  @typing.overload
1116
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1188
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1117
1189
  ...
1118
1190
 
1119
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1191
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1120
1192
  """
1121
- Specifies the event(s) that this flow depends on.
1193
+ Specifies the flow(s) that this flow depends on.
1122
1194
 
1123
1195
  ```
1124
- @trigger(event='foo')
1196
+ @trigger_on_finish(flow='FooFlow')
1125
1197
  ```
1126
1198
  or
1127
1199
  ```
1128
- @trigger(events=['foo', 'bar'])
1200
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1129
1201
  ```
1202
+ This decorator respects the @project decorator and triggers the flow
1203
+ when upstream runs within the same namespace complete successfully.
1130
1204
 
1131
- Additionally, you can specify the parameter mappings
1132
- to map event payload to Metaflow parameters for the flow.
1205
+ Additionally, you can specify project-aware upstream flow dependencies
1206
+ by specifying the fully qualified project_flow_name.
1133
1207
  ```
1134
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1208
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1135
1209
  ```
1136
1210
  or
1137
1211
  ```
1138
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1139
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1212
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1140
1213
  ```
1141
1214
 
1142
- 'parameters' can also be a list of strings and tuples like so:
1143
- ```
1144
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1145
- ```
1146
- This is equivalent to:
1215
+ You can also specify just the project or project branch (other values will be
1216
+ inferred from the current project or project branch):
1147
1217
  ```
1148
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1218
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1149
1219
  ```
1150
1220
 
1221
+ Note that `branch` is typically one of:
1222
+ - `prod`
1223
+ - `user.bob`
1224
+ - `test.my_experiment`
1225
+ - `prod.staging`
1226
+
1151
1227
 
1152
1228
  Parameters
1153
1229
  ----------
1154
- event : Union[str, Dict[str, Any]], optional, default None
1155
- Event dependency for this flow.
1156
- events : List[Union[str, Dict[str, Any]]], default []
1157
- Events dependency for this flow.
1230
+ flow : Union[str, Dict[str, str]], optional, default None
1231
+ Upstream flow dependency for this flow.
1232
+ flows : List[Union[str, Dict[str, str]]], default []
1233
+ Upstream flow dependencies for this flow.
1158
1234
  options : Dict[str, Any], default {}
1159
1235
  Backend-specific configuration for tuning eventing behavior.
1160
1236
  """
1161
1237
  ...
1162
1238
 
1163
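Editor's note: a minimal `@trigger_on_finish` sketch; the upstream flow name is a placeholder.

```python
from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow="TrainingFlow")  # upstream flow name is a placeholder
class PublishFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PublishFlow()
```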
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1164
- """
1165
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1166
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1167
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1168
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1169
- starts only after all sensors finish.
1170
-
1171
-
1172
- Parameters
1173
- ----------
1174
- timeout : int
1175
- Time, in seconds before the task times out and fails. (Default: 3600)
1176
- poke_interval : int
1177
- Time in seconds that the job should wait in between each try. (Default: 60)
1178
- mode : str
1179
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1180
- exponential_backoff : bool
1181
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1182
- pool : str
1183
- the slot pool this task should run in,
1184
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1185
- soft_fail : bool
1186
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1187
- name : str
1188
- Name of the sensor on Airflow
1189
- description : str
1190
- Description of sensor in the Airflow UI
1191
- bucket_key : Union[str, List[str]]
1192
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1193
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1194
- bucket_name : str
1195
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1196
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1197
- wildcard_match : bool
1198
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1199
- aws_conn_id : str
1200
- a reference to the s3 connection on Airflow. (Default: None)
1201
- verify : bool
1202
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1203
- """
1204
- ...
1205
-
1206
- @typing.overload
1207
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1208
- """
1209
- Specifies the PyPI packages for all steps of the flow.
1210
-
1211
- Use `@pypi_base` to set common packages required by all
1212
- steps and use `@pypi` to specify step-specific overrides.
1213
-
1214
- Parameters
1215
- ----------
1216
- packages : Dict[str, str], default: {}
1217
- Packages to use for this flow. The key is the name of the package
1218
- and the value is the version to use.
1219
- python : str, optional, default: None
1220
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1221
- that the version used will correspond to the version of the Python interpreter used to start the run.
1222
- """
1223
- ...
1224
-
1225
- @typing.overload
1226
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1227
- ...
1228
-
1229
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1230
- """
1231
- Specifies the PyPI packages for all steps of the flow.
1232
-
1233
- Use `@pypi_base` to set common packages required by all
1234
- steps and use `@pypi` to specify step-specific overrides.
1235
-
1236
- Parameters
1237
- ----------
1238
- packages : Dict[str, str], default: {}
1239
- Packages to use for this flow. The key is the name of the package
1240
- and the value is the version to use.
1241
- python : str, optional, default: None
1242
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1243
- that the version used will correspond to the version of the Python interpreter used to start the run.
1244
- """
1245
- ...
1246
-
1247
1239
  @typing.overload
1248
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1240
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1249
1241
  """
1250
- Specifies the flow(s) that this flow depends on.
1242
+ Specifies the event(s) that this flow depends on.
1251
1243
 
1252
1244
  ```
1253
- @trigger_on_finish(flow='FooFlow')
1245
+ @trigger(event='foo')
1254
1246
  ```
1255
1247
  or
1256
1248
  ```
1257
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1249
+ @trigger(events=['foo', 'bar'])
1258
1250
  ```
1259
- This decorator respects the @project decorator and triggers the flow
1260
- when upstream runs within the same namespace complete successfully
1261
1251
 
1262
- Additionally, you can specify project aware upstream flow dependencies
1263
- by specifying the fully qualified project_flow_name.
1252
+ Additionally, you can specify the parameter mappings
1253
+ to map event payload to Metaflow parameters for the flow.
1264
1254
  ```
1265
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1255
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1266
1256
  ```
1267
1257
  or
1268
1258
  ```
1269
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1259
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1260
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1270
1261
  ```
1271
1262
 
1272
- You can also specify just the project or project branch (other values will be
1273
- inferred from the current project or project branch):
1263
+ 'parameters' can also be a list of strings and tuples like so:
1274
1264
  ```
1275
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1265
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1266
+ ```
1267
+ This is equivalent to:
1268
+ ```
1269
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1276
1270
  ```
1277
-
1278
- Note that `branch` is typically one of:
1279
- - `prod`
1280
- - `user.bob`
1281
- - `test.my_experiment`
1282
- - `prod.staging`
1283
1271
 
1284
1272
 
1285
1273
  Parameters
1286
1274
  ----------
1287
- flow : Union[str, Dict[str, str]], optional, default None
1288
- Upstream flow dependency for this flow.
1289
- flows : List[Union[str, Dict[str, str]]], default []
1290
- Upstream flow dependencies for this flow.
1275
+ event : Union[str, Dict[str, Any]], optional, default None
1276
+ Event dependency for this flow.
1277
+ events : List[Union[str, Dict[str, Any]]], default []
1278
+ Events dependency for this flow.
1291
1279
  options : Dict[str, Any], default {}
1292
1280
  Backend-specific configuration for tuning eventing behavior.
1293
1281
  """
1294
1282
  ...
1295
1283
 
1296
1284
  @typing.overload
1297
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1285
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1298
1286
  ...
1299
1287
 
1300
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1288
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1301
1289
  """
1302
- Specifies the flow(s) that this flow depends on.
1290
+ Specifies the event(s) that this flow depends on.
1303
1291
 
1304
1292
  ```
1305
- @trigger_on_finish(flow='FooFlow')
1293
+ @trigger(event='foo')
1306
1294
  ```
1307
1295
  or
1308
1296
  ```
1309
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1297
+ @trigger(events=['foo', 'bar'])
1310
1298
  ```
1311
- This decorator respects the @project decorator and triggers the flow
1312
- when upstream runs within the same namespace complete successfully
1313
1299
 
1314
- Additionally, you can specify project aware upstream flow dependencies
1315
- by specifying the fully qualified project_flow_name.
1300
+ Additionally, you can specify the parameter mappings
1301
+ to map event payload to Metaflow parameters for the flow.
1316
1302
  ```
1317
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1303
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1318
1304
  ```
1319
1305
  or
1320
1306
  ```
1321
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1307
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1308
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1322
1309
  ```
1323
1310
 
1324
- You can also specify just the project or project branch (other values will be
1325
- inferred from the current project or project branch):
1311
+ 'parameters' can also be a list of strings and tuples like so:
1326
1312
  ```
1327
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1313
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1314
+ ```
1315
+ This is equivalent to:
1316
+ ```
1317
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1328
1318
  ```
1329
-
1330
- Note that `branch` is typically one of:
1331
- - `prod`
1332
- - `user.bob`
1333
- - `test.my_experiment`
1334
- - `prod.staging`
1335
1319
 
1336
1320
 
1337
1321
  Parameters
1338
1322
  ----------
1339
- flow : Union[str, Dict[str, str]], optional, default None
1340
- Upstream flow dependency for this flow.
1341
- flows : List[Union[str, Dict[str, str]]], default []
1342
- Upstream flow dependencies for this flow.
1323
+ event : Union[str, Dict[str, Any]], optional, default None
1324
+ Event dependency for this flow.
1325
+ events : List[Union[str, Dict[str, Any]]], default []
1326
+ Events dependency for this flow.
1343
1327
  options : Dict[str, Any], default {}
1344
1328
  Backend-specific configuration for tuning eventing behavior.
1345
1329
  """
@@ -1396,84 +1380,6 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1396
1380
  """
1397
1381
  ...
1398
1382
 
1399
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1400
- """
1401
- Specifies what flows belong to the same project.
1402
-
1403
- A project-specific namespace is created for all flows that
1404
- use the same `@project(name)`.
1405
-
1406
-
1407
- Parameters
1408
- ----------
1409
- name : str
1410
- Project name. Make sure that the name is unique amongst all
1411
- projects that use the same production scheduler. The name may
1412
- contain only lowercase alphanumeric characters and underscores.
1413
-
1414
- branch : Optional[str], default None
1415
- The branch to use. If not specified, the branch is set to
1416
- `user.<username>` unless `production` is set to `True`. This can
1417
- also be set on the command line using `--branch` as a top-level option.
1418
- It is an error to specify `branch` in the decorator and on the command line.
1419
-
1420
- production : bool, default False
1421
- Whether or not the branch is the production branch. This can also be set on the
1422
- command line using `--production` as a top-level option. It is an error to specify
1423
- `production` in the decorator and on the command line.
1424
- The project branch name will be:
1425
- - if `branch` is specified:
1426
- - if `production` is True: `prod.<branch>`
1427
- - if `production` is False: `test.<branch>`
1428
- - if `branch` is not specified:
1429
- - if `production` is True: `prod`
1430
- - if `production` is False: `user.<username>`
1431
- """
1432
- ...
1433
-
1434
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1435
- """
1436
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1437
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1438
-
1439
-
1440
- Parameters
1441
- ----------
1442
- timeout : int
1443
- Time, in seconds before the task times out and fails. (Default: 3600)
1444
- poke_interval : int
1445
- Time in seconds that the job should wait in between each try. (Default: 60)
1446
- mode : str
1447
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1448
- exponential_backoff : bool
1449
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1450
- pool : str
1451
- the slot pool this task should run in,
1452
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1453
- soft_fail : bool
1454
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1455
- name : str
1456
- Name of the sensor on Airflow
1457
- description : str
1458
- Description of sensor in the Airflow UI
1459
- external_dag_id : str
1460
- The dag_id that contains the task you want to wait for.
1461
- external_task_ids : List[str]
1462
- The list of task_ids that you want to wait for.
1463
- If None (default value) the sensor waits for the DAG. (Default: None)
1464
- allowed_states : List[str]
1465
- Iterable of allowed states, (Default: ['success'])
1466
- failed_states : List[str]
1467
- Iterable of failed or dis-allowed states. (Default: None)
1468
- execution_delta : datetime.timedelta
1469
- time difference with the previous execution to look at,
1470
- the default is the same logical date as the current task or DAG. (Default: None)
1471
- check_existence: bool
1472
- Set to True to check if the external task exists or check if
1473
- the DAG to wait for exists. (Default: True)
1474
- """
1475
- ...
1476
-
1477
1383
  def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1478
1384
  """
1479
1385
  Allows setting external datastores to save data for the
@@ -1588,5 +1494,99 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1588
1494
  """
1589
1495
  ...
1590
1496
 
1497
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1498
+ """
1499
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1500
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1501
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1502
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
1503
+ starts only after all sensors finish.
1504
+
1505
+
1506
+ Parameters
1507
+ ----------
1508
+ timeout : int
1509
+ Time, in seconds before the task times out and fails. (Default: 3600)
1510
+ poke_interval : int
1511
+ Time in seconds that the job should wait in between each try. (Default: 60)
1512
+ mode : str
1513
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1514
+ exponential_backoff : bool
1515
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1516
+ pool : str
1517
+ the slot pool this task should run in,
1518
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1519
+ soft_fail : bool
1520
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1521
+ name : str
1522
+ Name of the sensor on Airflow
1523
+ description : str
1524
+ Description of sensor in the Airflow UI
1525
+ bucket_key : Union[str, List[str]]
1526
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
1527
+ When specified as a full s3:// URL, leave `bucket_name` as None.
1528
+ bucket_name : str
1529
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
1530
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1531
+ wildcard_match : bool
1532
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1533
+ aws_conn_id : str
1534
+ A reference to the S3 connection on Airflow. (Default: None)
1535
+ verify : bool
1536
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1537
+ """
1538
+ ...
1539
+
1540
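Editor's note: an illustrative `@airflow_s3_key_sensor` configuration; the bucket key is a placeholder and the other values restate the documented defaults.

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor

@airflow_s3_key_sensor(
    name="wait_for_daily_export",
    description="Wait for the day's export before starting",
    bucket_key="s3://example-bucket/exports/latest.parquet",  # placeholder key
    bucket_name=None,        # not needed with a full s3:// URL
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3GatedFlow()
```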
+ @typing.overload
1541
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1542
+ """
1543
+ Specifies the Conda environment for all steps of the flow.
1544
+
1545
+ Use `@conda_base` to set common libraries required by all
1546
+ steps and use `@conda` to specify step-specific additions.
1547
+
1548
+
1549
+ Parameters
1550
+ ----------
1551
+ packages : Dict[str, str], default {}
1552
+ Packages to use for this flow. The key is the name of the package
1553
+ and the value is the version to use.
1554
+ libraries : Dict[str, str], default {}
1555
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1556
+ python : str, optional, default None
1557
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1558
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1559
+ disabled : bool, default False
1560
+ If set to True, disables Conda.
1561
+ """
1562
+ ...
1563
+
1564
+ @typing.overload
1565
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1566
+ ...
1567
+
1568
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1569
+ """
1570
+ Specifies the Conda environment for all steps of the flow.
1571
+
1572
+ Use `@conda_base` to set common libraries required by all
1573
+ steps and use `@conda` to specify step-specific additions.
1574
+
1575
+
1576
+ Parameters
1577
+ ----------
1578
+ packages : Dict[str, str], default {}
1579
+ Packages to use for this flow. The key is the name of the package
1580
+ and the value is the version to use.
1581
+ libraries : Dict[str, str], default {}
1582
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1583
+ python : str, optional, default None
1584
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1585
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1586
+ disabled : bool, default False
1587
+ If set to True, disables Conda.
1588
+ """
1589
+ ...
1590
+
1591
1591
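Editor's note: a minimal `@conda_base` sketch mirroring the docstring above; the package and Python pins are examples only.

```python
from metaflow import FlowSpec, conda_base, step

@conda_base(packages={"numpy": "1.26.4"}, python="3.11.0")  # example pins
class CondaFlow(FlowSpec):

    @step
    def start(self):
        import numpy  # resolved from the flow-wide Conda environment
        print(numpy.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaFlow()
```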
  pkg_name: str
1592
1592