ob-metaflow-stubs 6.0.3.178__py2.py3-none-any.whl → 6.0.3.179rc1__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (220)
  1. metaflow-stubs/__init__.pyi +670 -669
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +122 -122
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +2 -2
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +6 -0
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +51 -0
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +65 -0
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +74 -0
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -1
  92. metaflow-stubs/multicore_utils.pyi +1 -1
  93. metaflow-stubs/ob_internal.pyi +11 -0
  94. metaflow-stubs/parameters.pyi +3 -3
  95. metaflow-stubs/plugins/__init__.pyi +9 -9
  96. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  98. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  99. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  100. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  101. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  102. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  103. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  104. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  105. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  106. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  107. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  108. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  109. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  110. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  111. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  113. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  115. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  116. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  119. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  120. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  121. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  122. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  123. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  124. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  125. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  126. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  128. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  129. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  130. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  131. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  132. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  133. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  135. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  138. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  140. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  141. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  142. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  143. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  144. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  145. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  146. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  147. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  148. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  149. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  151. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  152. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  153. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  154. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  155. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  156. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  157. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  158. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  159. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  161. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  162. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  163. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  164. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  165. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  166. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  169. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  170. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  171. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  173. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  174. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  175. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/perimeters.pyi +1 -1
  177. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  179. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  181. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  182. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  184. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  185. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  186. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  187. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  188. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  189. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  192. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  193. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  194. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  196. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  197. metaflow-stubs/profilers/__init__.pyi +1 -1
  198. metaflow-stubs/pylint_wrapper.pyi +1 -1
  199. metaflow-stubs/runner/__init__.pyi +1 -1
  200. metaflow-stubs/runner/deployer.pyi +29 -29
  201. metaflow-stubs/runner/deployer_impl.pyi +1 -1
  202. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  203. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  204. metaflow-stubs/runner/nbrun.pyi +1 -1
  205. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  206. metaflow-stubs/runner/utils.pyi +3 -3
  207. metaflow-stubs/system/__init__.pyi +1 -1
  208. metaflow-stubs/system/system_logger.pyi +2 -2
  209. metaflow-stubs/system/system_monitor.pyi +1 -1
  210. metaflow-stubs/tagging_util.pyi +1 -1
  211. metaflow-stubs/tuple_util.pyi +1 -1
  212. metaflow-stubs/user_configs/__init__.pyi +1 -1
  213. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  214. metaflow-stubs/user_configs/config_options.pyi +2 -2
  215. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  216. {ob_metaflow_stubs-6.0.3.178.dist-info → ob_metaflow_stubs-6.0.3.179rc1.dist-info}/METADATA +1 -1
  217. ob_metaflow_stubs-6.0.3.179rc1.dist-info/RECORD +220 -0
  218. ob_metaflow_stubs-6.0.3.178.dist-info/RECORD +0 -215
  219. {ob_metaflow_stubs-6.0.3.178.dist-info → ob_metaflow_stubs-6.0.3.179rc1.dist-info}/WHEEL +0 -0
  220. {ob_metaflow_stubs-6.0.3.178.dist-info → ob_metaflow_stubs-6.0.3.179rc1.dist-info}/top_level.txt +0 -0
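The bulk of the `metaflow-stubs/__init__.pyi` diff below reorders the `@typing.overload` stubs for step decorators such as `@retry`, `@checkpoint`, `@conda`, and `@pypi`, and adds new stubs for the Fast Bakery plugin and `ob_internal`. For orientation, here is a minimal sketch of how the decorators documented in those stubs are applied in user code; the flow name, step names, and package pins are illustrative only and are not taken from either wheel:

# Illustrative only: a toy flow using step decorators whose stubs change in this release.
# Assumes the ob-metaflow distribution that these stubs describe is installed.
from metaflow import FlowSpec, step, retry, checkpoint, pypi

class TrainFlow(FlowSpec):

    @retry(times=3, minutes_between_retries=2)   # defaults shown in the stub docstring
    @checkpoint(load_policy="fresh")             # reload checkpoints across retries of this task
    @pypi(packages={"scikit-learn": "1.5.0"})    # step-specific PyPI packages (version is hypothetical)
    @step
    def start(self):
        self.trained = True
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TrainFlow()

The full signatures and docstrings for each of these decorators appear in the `__init__.pyi` hunks that follow.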
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-06-13T18:34:09.309751 #
+ # Generated on 2025-06-13T20:01:40.043002 #
  ######################################################################################################

  from __future__ import annotations
@@ -37,16 +37,16 @@ from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDec
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
  from . import tuple_util as tuple_util
  from . import cards as cards
- from . import events as events
  from . import metaflow_git as metaflow_git
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -78,6 +78,7 @@ from . import system as system
  from . import pylint_wrapper as pylint_wrapper
  from . import cli as cli
  from . import profilers as profilers
+ from . import ob_internal as ob_internal

  EXT_PKG: str

@@ -154,6 +155,23 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
  @typing.overload
  def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -214,133 +232,198 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Internal decorator to support Fast bakery
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Internal decorator to support Fast bakery
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ Enables checkpointing for a step.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ Enables checkpointing for a step.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
+
+
+ Parameters
+ ----------
+ temp_dir_root : str, optional
+ The root directory that will hold the temporary directory where objects will be downloaded.
+
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
+ The list of repos (models/datasets) to load.
+
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
+
+ - If repo (model/dataset) is not found in the datastore:
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
+
+ - If repo is found in the datastore:
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
  """
  ...

@@ -401,151 +484,87 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies that this step is used to deploy an instance of the app.
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
-
-
- Parameters
- ----------
- app_port : int
- Number of GPUs to use.
- app_name : str
- Name of the app to deploy.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies that this step should execute on DGX cloud.
-
-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
- """
- ...
-
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @ollama(
- models=[...],
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
-
- Valid model options
- -------------------
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
-
-
- Parameters
- ----------
- models: list[str]
- List of Ollama containers running models in sidecars.
- backend: str
- Determines where and how to run the Ollama process.
- force_pull: bool
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
- cache_update_policy: str
- Cache update policy: "auto", "force", or "never".
- force_cache_update: bool
- Simple override for "force" cache update policy.
- debug: bool
- Whether to turn on verbose debugging logs.
- circuit_breaker_config: dict
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
- timeout_config: dict
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies that the step will success under all circumstances.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies that the step will success under all circumstances.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
+ Specifies that this step should execute on DGX cloud.


  Parameters
  ----------
- temp_dir_root : str, optional
- The root directory that will hold the temporary directory where objects will be downloaded.
-
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
- The list of repos (models/datasets) to load.
-
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
-
- - If repo (model/dataset) is not found in the datastore:
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
-
- - If repo is found in the datastore:
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
  """
  ...

@@ -582,64 +601,116 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  """
  ...

+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step is used to deploy an instance of the app.
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
+
+
+ Parameters
+ ----------
+ app_port : int
+ Number of GPUs to use.
+ app_name : str
+ Name of the app to deploy.
+ """
+ ...
+
  @typing.overload
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Enables checkpointing for a step.
+ Specifies the PyPI packages for the step.

+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Enables checkpointing for a step.
+ Specifies the PyPI packages for the step.

+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.

- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
+
+
+ Parameters
+ ----------
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ cache_update_policy: str
+ Cache update policy: "auto", "force", or "never".
+ force_cache_update: bool
+ Simple override for "force" cache update policy.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ circuit_breaker_config: dict
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+ timeout_config: dict
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
  """
  ...

- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on DGX cloud.

@@ -650,149 +721,318 @@ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Cal
650
721
  Number of GPUs to use.
651
722
  gpu_type : str
652
723
  Type of Nvidia GPU to use.
724
+ queue_timeout : int
725
+ Time to keep the job in NVCF's queue.
653
726
  """
654
727
  ...
655
728
 
656
729
  @typing.overload
657
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
730
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
658
731
  """
659
- Specifies the number of times the task corresponding
660
- to a step needs to be retried.
661
-
662
- This decorator is useful for handling transient errors, such as networking issues.
663
- If your task contains operations that can't be retried safely, e.g. database updates,
664
- it is advisable to annotate it with `@retry(times=0)`.
665
-
666
- This can be used in conjunction with the `@catch` decorator. The `@catch`
667
- decorator will execute a no-op task after all retries have been exhausted,
668
- ensuring that the flow execution can continue.
732
+ Specifies secrets to be retrieved and injected as environment variables prior to
733
+ the execution of a step.
669
734
 
670
735
 
671
736
  Parameters
672
737
  ----------
673
- times : int, default 3
674
- Number of times to retry this task.
675
- minutes_between_retries : int, default 2
676
- Number of minutes between retries.
738
+ sources : List[Union[str, Dict[str, Any]]], default: []
739
+ List of secret specs, defining how the secrets are to be retrieved
677
740
  """
678
741
  ...
679
742
 
680
743
  @typing.overload
681
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
744
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
682
745
  ...
683
746
 
684
747
  @typing.overload
685
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
748
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
686
749
  ...
687
750
 
688
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
751
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
689
752
  """
690
- Specifies the number of times the task corresponding
691
- to a step needs to be retried.
692
-
693
- This decorator is useful for handling transient errors, such as networking issues.
694
- If your task contains operations that can't be retried safely, e.g. database updates,
695
- it is advisable to annotate it with `@retry(times=0)`.
696
-
697
- This can be used in conjunction with the `@catch` decorator. The `@catch`
698
- decorator will execute a no-op task after all retries have been exhausted,
699
- ensuring that the flow execution can continue.
753
+ Specifies secrets to be retrieved and injected as environment variables prior to
754
+ the execution of a step.
700
755
 
701
756
 
702
757
  Parameters
703
758
  ----------
704
- times : int, default 3
705
- Number of times to retry this task.
706
- minutes_between_retries : int, default 2
707
- Number of minutes between retries.
759
+ sources : List[Union[str, Dict[str, Any]]], default: []
760
+ List of secret specs, defining how the secrets are to be retrieved
708
761
  """
709
762
  ...
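A small sketch of consuming an injected secret; the secret source name `'db-credentials'` and the `DB_USER` key are hypothetical and depend entirely on what the configured secrets backend stores:

```
import os

from metaflow import FlowSpec, step, secrets


class SecretsExampleFlow(FlowSpec):

    # The keys of the (hypothetical) 'db-credentials' secret are injected as
    # environment variables before the step body executes.
    @secrets(sources=['db-credentials'])
    @step
    def start(self):
        print('DB user is', os.environ.get('DB_USER'))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    SecretsExampleFlow()
```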
710
763
 
711
764
  @typing.overload
712
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
765
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
713
766
  """
714
- Specifies that the step will succeed under all circumstances.
767
+ Creates a human-readable report, a Metaflow Card, after this step completes.
715
768
 
716
- The decorator will create an optional artifact, specified by `var`, which
717
- contains the exception raised. You can use it to detect the presence
718
- of errors, indicating that all happy-path artifacts produced by the step
719
- are missing.
769
+ Note that you may add multiple `@card` decorators in a step with different parameters.
720
770
 
721
771
 
722
772
  Parameters
723
773
  ----------
724
- var : str, optional, default None
725
- Name of the artifact in which to store the caught exception.
726
- If not specified, the exception is not stored.
727
- print_exception : bool, default True
728
- Determines whether or not the exception is printed to
729
- stdout when caught.
774
+ type : str, default 'default'
775
+ Card type.
776
+ id : str, optional, default None
777
+ If multiple cards are present, use this id to identify this card.
778
+ options : Dict[str, Any], default {}
779
+ Options passed to the card. The contents depend on the card type.
780
+ timeout : int, default 45
781
+ Interrupt reporting if it takes more than this many seconds.
730
782
  """
731
783
  ...
732
784
 
733
785
  @typing.overload
734
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
786
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
735
787
  ...
736
788
 
737
789
  @typing.overload
738
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
790
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
739
791
  ...
740
792
 
741
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
793
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
742
794
  """
743
- Specifies that the step will succeed under all circumstances.
795
+ Creates a human-readable report, a Metaflow Card, after this step completes.
744
796
 
745
- The decorator will create an optional artifact, specified by `var`, which
746
- contains the exception raised. You can use it to detect the presence
747
- of errors, indicating that all happy-path artifacts produced by the step
748
- are missing.
797
+ Note that you may add multiple `@card` decorators in a step with different parameters.
749
798
 
750
799
 
751
800
  Parameters
752
801
  ----------
753
- var : str, optional, default None
754
- Name of the artifact in which to store the caught exception.
755
- If not specified, the exception is not stored.
756
- print_exception : bool, default True
757
- Determines whether or not the exception is printed to
758
- stdout when caught.
802
+ type : str, default 'default'
803
+ Card type.
804
+ id : str, optional, default None
805
+ If multiple cards are present, use this id to identify this card.
806
+ options : Dict[str, Any], default {}
807
+ Options passed to the card. The contents depend on the card type.
808
+ timeout : int, default 45
809
+ Interrupt reporting if it takes more than this many seconds.
810
+ """
811
+ ...
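A short sketch of producing a card and appending content to it through `current.card`; the Markdown body is illustrative:

```
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown


class CardExampleFlow(FlowSpec):

    # Render a default card identified as 'summary'; content appended during
    # the step is collected after it completes (subject to the timeout above).
    @card(type='default', id='summary')
    @step
    def start(self):
        current.card['summary'].append(Markdown('# Run summary: all checks passed'))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    CardExampleFlow()
```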
812
+
813
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
814
+ """
815
+ Specifies that this step should execute on Kubernetes.
816
+
817
+
818
+ Parameters
819
+ ----------
820
+ cpu : int, default 1
821
+ Number of CPUs required for this step. If `@resources` is
822
+ also present, the maximum value from all decorators is used.
823
+ memory : int, default 4096
824
+ Memory size (in MB) required for this step. If
825
+ `@resources` is also present, the maximum value from all decorators is
826
+ used.
827
+ disk : int, default 10240
828
+ Disk size (in MB) required for this step. If
829
+ `@resources` is also present, the maximum value from all decorators is
830
+ used.
831
+ image : str, optional, default None
832
+ Docker image to use when launching on Kubernetes. If not specified, and
833
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
834
+ not, a default Docker image mapping to the current version of Python is used.
835
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
836
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
837
+ image_pull_secrets: List[str], default []
838
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
839
+ Kubernetes image pull secrets to use when pulling container images
840
+ in Kubernetes.
841
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
842
+ Kubernetes service account to use when launching pod in Kubernetes.
843
+ secrets : List[str], optional, default None
844
+ Kubernetes secrets to use when launching pod in Kubernetes. These
845
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
846
+ in Metaflow configuration.
847
+ node_selector: Union[Dict[str,str], str], optional, default None
848
+ Kubernetes node selector(s) to apply to the pod running the task.
849
+ Can be passed in as a comma separated string of values e.g.
850
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
851
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
852
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
853
+ Kubernetes namespace to use when launching pod in Kubernetes.
854
+ gpu : int, optional, default None
855
+ Number of GPUs required for this step. A value of zero implies that
856
+ the scheduled node should not have GPUs.
857
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
858
+ The vendor of the GPUs to be used for this step.
859
+ tolerations : List[str], default []
860
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
861
+ Kubernetes tolerations to use when launching pod in Kubernetes.
862
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
863
+ Kubernetes labels to use when launching pod in Kubernetes.
864
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
865
+ Kubernetes annotations to use when launching pod in Kubernetes.
866
+ use_tmpfs : bool, default False
867
+ This enables an explicit tmpfs mount for this step.
868
+ tmpfs_tempdir : bool, default True
869
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
870
+ tmpfs_size : int, optional, default: None
871
+ The value for the size (in MiB) of the tmpfs mount for this step.
872
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
873
+ memory allocated for this step.
874
+ tmpfs_path : str, optional, default /metaflow_temp
875
+ Path to tmpfs mount for this step.
876
+ persistent_volume_claims : Dict[str, str], optional, default None
877
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
878
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
879
+ shared_memory: int, optional
880
+ Shared memory size (in MiB) required for this step
881
+ port: int, optional
882
+ Port number to specify in the Kubernetes job object
883
+ compute_pool : str, optional, default None
884
+ Compute pool to be used for this step.
885
+ If not specified, any accessible compute pool within the perimeter is used.
886
+ hostname_resolution_timeout: int, default 10 * 60
887
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
888
+ Only applicable when @parallel is used.
889
+ qos: str, default: Burstable
890
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
891
+
892
+ security_context: Dict[str, Any], optional, default None
893
+ Container security context. Applies to the task container. Allows the following keys:
894
+ - privileged: bool, optional, default None
895
+ - allow_privilege_escalation: bool, optional, default None
896
+ - run_as_user: int, optional, default None
897
+ - run_as_group: int, optional, default None
898
+ - run_as_non_root: bool, optional, default None
759
899
  """
760
900
  ...
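A brief sketch of pinning a step to Kubernetes with explicit resources; the image and node selector values are placeholders (memory and disk are in MB, per the parameter list above):

```
from metaflow import FlowSpec, kubernetes, step


class K8sExampleFlow(FlowSpec):

    # 2 CPUs and 8192 MB of memory, a placeholder image, and an arch selector.
    @kubernetes(cpu=2, memory=8192,
                image='python:3.11-slim',
                node_selector={'kubernetes.io/arch': 'amd64'})
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    K8sExampleFlow()
```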
761
901
 
762
902
  @typing.overload
763
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
903
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
764
904
  """
765
- Decorator prototype for all step decorators. This function gets specialized
766
- and imported for all decorator types by _import_plugin_decorators().
905
+ Specifies the resources needed when executing this step.
906
+
907
+ Use `@resources` to specify the resource requirements
908
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
909
+
910
+ You can choose the compute layer on the command line by executing e.g.
911
+ ```
912
+ python myflow.py run --with batch
913
+ ```
914
+ or
915
+ ```
916
+ python myflow.py run --with kubernetes
917
+ ```
918
+ which executes the flow on the desired system using the
919
+ requirements specified in `@resources`.
920
+
921
+
922
+ Parameters
923
+ ----------
924
+ cpu : int, default 1
925
+ Number of CPUs required for this step.
926
+ gpu : int, optional, default None
927
+ Number of GPUs required for this step.
928
+ disk : int, optional, default None
929
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
930
+ memory : int, default 4096
931
+ Memory size (in MB) required for this step.
932
+ shared_memory : int, optional, default None
933
+ The value for the size (in MiB) of the /dev/shm volume for this step.
934
+ This parameter maps to the `--shm-size` option in Docker.
767
935
  """
768
936
  ...
769
937
 
770
938
  @typing.overload
771
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
939
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
772
940
  ...
773
941
 
774
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
942
+ @typing.overload
943
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
944
+ ...
945
+
946
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
775
947
  """
776
- Decorator prototype for all step decorators. This function gets specialized
777
- and imported for all decorator types by _import_plugin_decorators().
948
+ Specifies the resources needed when executing this step.
949
+
950
+ Use `@resources` to specify the resource requirements
951
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
952
+
953
+ You can choose the compute layer on the command line by executing e.g.
954
+ ```
955
+ python myflow.py run --with batch
956
+ ```
957
+ or
958
+ ```
959
+ python myflow.py run --with kubernetes
960
+ ```
961
+ which executes the flow on the desired system using the
962
+ requirements specified in `@resources`.
963
+
964
+
965
+ Parameters
966
+ ----------
967
+ cpu : int, default 1
968
+ Number of CPUs required for this step.
969
+ gpu : int, optional, default None
970
+ Number of GPUs required for this step.
971
+ disk : int, optional, default None
972
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
973
+ memory : int, default 4096
974
+ Memory size (in MB) required for this step.
975
+ shared_memory : int, optional, default None
976
+ The value for the size (in MiB) of the /dev/shm volume for this step.
977
+ This parameter maps to the `--shm-size` option in Docker.
978
+ """
979
+ ...
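Because `@resources` only declares requirements, the compute layer is still chosen at run time; a minimal sketch:

```
from metaflow import FlowSpec, resources, step


class ResourcesExampleFlow(FlowSpec):

    # Declare requirements once; pick the backend when launching, e.g.
    #   python resources_example.py run --with kubernetes
    @resources(cpu=4, memory=16000)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    ResourcesExampleFlow()
```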
980
+
981
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
982
+ """
983
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
984
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
985
+
986
+
987
+ Parameters
988
+ ----------
989
+ timeout : int
990
+ Time, in seconds before the task times out and fails. (Default: 3600)
991
+ poke_interval : int
992
+ Time in seconds that the job should wait in between each try. (Default: 60)
993
+ mode : str
994
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
995
+ exponential_backoff : bool
996
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
997
+ pool : str
998
+ The slot pool this task should run in;
999
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1000
+ soft_fail : bool
1001
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1002
+ name : str
1003
+ Name of the sensor on Airflow
1004
+ description : str
1005
+ Description of sensor in the Airflow UI
1006
+ external_dag_id : str
1007
+ The dag_id that contains the task you want to wait for.
1008
+ external_task_ids : List[str]
1009
+ The list of task_ids that you want to wait for.
1010
+ If None (default value) the sensor waits for the DAG. (Default: None)
1011
+ allowed_states : List[str]
1012
+ Iterable of allowed states, (Default: ['success'])
1013
+ failed_states : List[str]
1014
+ Iterable of failed or dis-allowed states. (Default: None)
1015
+ execution_delta : datetime.timedelta
1016
+ Time difference with the previous execution to look at;
1017
+ the default is the same logical date as the current task or DAG. (Default: None)
1018
+ check_existence: bool
1019
+ Set to True to check if the external task exists or check if
1020
+ the DAG to wait for exists. (Default: True)
778
1021
  """
779
1022
  ...
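A hedged flow-level sketch of the sensor; the external DAG id and sensor name are placeholders, and only a subset of the documented arguments is passed (the rest are assumed to fall back to the defaults listed above):

```
from metaflow import FlowSpec, airflow_external_task_sensor, step  # decorator assumed re-exported per this stub


# Waits for the (hypothetical) 'upstream_dag' DAG to finish before `start`
# runs, once this flow is compiled with `airflow create` and scheduled on Airflow.
@airflow_external_task_sensor(external_dag_id='upstream_dag',
                              name='wait_for_upstream',
                              timeout=3600,
                              poke_interval=60,
                              mode='poke')
class SensorExampleFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    SensorExampleFlow()
```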
780
1023
 
781
1024
  @typing.overload
782
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1025
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
783
1026
  """
784
- Specifies the PyPI packages for the step.
1027
+ Specifies the PyPI packages for all steps of the flow.
785
1028
 
786
- Information in this decorator will augment any
787
- attributes set in the `@pypi_base` flow-level decorator. Hence,
788
- you can use `@pypi_base` to set packages required by all
1029
+ Use `@pypi_base` to set common packages required by all
789
1030
  steps and use `@pypi` to specify step-specific overrides.
790
1031
 
791
-
792
1032
  Parameters
793
1033
  ----------
794
1034
  packages : Dict[str, str], default: {}
795
- Packages to use for this step. The key is the name of the package
1035
+ Packages to use for this flow. The key is the name of the package
796
1036
  and the value is the version to use.
797
1037
  python : str, optional, default: None
798
1038
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
@@ -801,27 +1041,20 @@ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] =
801
1041
  ...
802
1042
 
803
1043
  @typing.overload
804
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
805
- ...
806
-
807
- @typing.overload
808
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1044
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
809
1045
  ...
810
1046
 
811
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1047
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
812
1048
  """
813
- Specifies the PyPI packages for the step.
814
-
815
- Information in this decorator will augment any
816
- attributes set in the `@pypi_base` flow-level decorator. Hence,
817
- you can use `@pypi_base` to set packages required by all
818
- steps and use `@pypi` to specify step-specific overrides.
1049
+ Specifies the PyPI packages for all steps of the flow.
819
1050
 
1051
+ Use `@pypi_base` to set common packages required by all
1052
+ steps and use `@pypi` to specify step-specific overrides.
820
1053
 
821
1054
  Parameters
822
1055
  ----------
823
1056
  packages : Dict[str, str], default: {}
824
- Packages to use for this step. The key is the name of the package
1057
+ Packages to use for this flow. The key is the name of the package
825
1058
  and the value is the version to use.
826
1059
  python : str, optional, default: None
827
1060
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
@@ -829,110 +1062,46 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
829
1062
  """
830
1063
  ...
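A minimal sketch of pinning flow-wide PyPI dependencies; the package and interpreter versions are illustrative:

```
from metaflow import FlowSpec, pypi_base, step


@pypi_base(packages={'pandas': '2.2.2', 'pyarrow': '16.1.0'}, python='3.11.9')
class PypiBaseExampleFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the flow-level environment
        self.rows = len(pd.DataFrame({'x': [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    PypiBaseExampleFlow()
```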
831
1064
 
832
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1065
+ @typing.overload
1066
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
833
1067
  """
834
- Specifies that this step should execute on Kubernetes.
1068
+ Specifies the Conda environment for all steps of the flow.
1069
+
1070
+ Use `@conda_base` to set common libraries required by all
1071
+ steps and use `@conda` to specify step-specific additions.
835
1072
 
836
1073
 
837
1074
  Parameters
838
1075
  ----------
839
- cpu : int, default 1
840
- Number of CPUs required for this step. If `@resources` is
841
- also present, the maximum value from all decorators is used.
842
- memory : int, default 4096
843
- Memory size (in MB) required for this step. If
844
- `@resources` is also present, the maximum value from all decorators is
845
- used.
846
- disk : int, default 10240
847
- Disk size (in MB) required for this step. If
848
- `@resources` is also present, the maximum value from all decorators is
849
- used.
850
- image : str, optional, default None
851
- Docker image to use when launching on Kubernetes. If not specified, and
852
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
853
- not, a default Docker image mapping to the current version of Python is used.
854
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
855
- If given, the imagePullPolicy to be applied to the Docker image of the step.
856
- image_pull_secrets: List[str], default []
857
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
858
- Kubernetes image pull secrets to use when pulling container images
859
- in Kubernetes.
860
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
861
- Kubernetes service account to use when launching pod in Kubernetes.
862
- secrets : List[str], optional, default None
863
- Kubernetes secrets to use when launching pod in Kubernetes. These
864
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
865
- in Metaflow configuration.
866
- node_selector: Union[Dict[str,str], str], optional, default None
867
- Kubernetes node selector(s) to apply to the pod running the task.
868
- Can be passed in as a comma separated string of values e.g.
869
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
870
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
871
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
872
- Kubernetes namespace to use when launching pod in Kubernetes.
873
- gpu : int, optional, default None
874
- Number of GPUs required for this step. A value of zero implies that
875
- the scheduled node should not have GPUs.
876
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
877
- The vendor of the GPUs to be used for this step.
878
- tolerations : List[str], default []
879
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
880
- Kubernetes tolerations to use when launching pod in Kubernetes.
881
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
882
- Kubernetes labels to use when launching pod in Kubernetes.
883
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
884
- Kubernetes annotations to use when launching pod in Kubernetes.
885
- use_tmpfs : bool, default False
886
- This enables an explicit tmpfs mount for this step.
887
- tmpfs_tempdir : bool, default True
888
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
889
- tmpfs_size : int, optional, default: None
890
- The value for the size (in MiB) of the tmpfs mount for this step.
891
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
892
- memory allocated for this step.
893
- tmpfs_path : str, optional, default /metaflow_temp
894
- Path to tmpfs mount for this step.
895
- persistent_volume_claims : Dict[str, str], optional, default None
896
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
897
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
898
- shared_memory: int, optional
899
- Shared memory size (in MiB) required for this step
900
- port: int, optional
901
- Port number to specify in the Kubernetes job object
902
- compute_pool : str, optional, default None
903
- Compute pool to be used for this step.
904
- If not specified, any accessible compute pool within the perimeter is used.
905
- hostname_resolution_timeout: int, default 10 * 60
906
- Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
907
- Only applicable when @parallel is used.
908
- qos: str, default: Burstable
909
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
910
-
911
- security_context: Dict[str, Any], optional, default None
912
- Container security context. Applies to the task container. Allows the following keys:
913
- - privileged: bool, optional, default None
914
- - allow_privilege_escalation: bool, optional, default None
915
- - run_as_user: int, optional, default None
916
- - run_as_group: int, optional, default None
917
- - run_as_non_root: bool, optional, default None
1076
+ packages : Dict[str, str], default {}
1077
+ Packages to use for this flow. The key is the name of the package
1078
+ and the value is the version to use.
1079
+ libraries : Dict[str, str], default {}
1080
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1081
+ python : str, optional, default None
1082
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1083
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1084
+ disabled : bool, default False
1085
+ If set to True, disables Conda.
918
1086
  """
919
1087
  ...
920
1088
 
921
1089
  @typing.overload
922
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1090
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1091
+ ...
1092
+
1093
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
923
1094
  """
924
- Specifies the Conda environment for the step.
1095
+ Specifies the Conda environment for all steps of the flow.
925
1096
 
926
- Information in this decorator will augment any
927
- attributes set in the `@conda_base` flow-level decorator. Hence,
928
- you can use `@conda_base` to set packages required by all
929
- steps and use `@conda` to specify step-specific overrides.
1097
+ Use `@conda_base` to set common libraries required by all
1098
+ steps and use `@conda` to specify step-specific additions.
930
1099
 
931
1100
 
932
1101
  Parameters
933
1102
  ----------
934
1103
  packages : Dict[str, str], default {}
935
- Packages to use for this step. The key is the name of the package
1104
+ Packages to use for this flow. The key is the name of the package
936
1105
  and the value is the version to use.
937
1106
  libraries : Dict[str, str], default {}
938
1107
  Supported for backward compatibility. When used with packages, packages will take precedence.
@@ -940,40 +1109,100 @@ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, s
940
1109
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
941
1110
  that the version used will correspond to the version of the Python interpreter used to start the run.
942
1111
  disabled : bool, default False
943
- If set to True, disables @conda.
1112
+ If set to True, disables Conda.
944
1113
  """
945
1114
  ...
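The Conda counterpart looks nearly identical; a step-level `@conda` would then only add step-specific packages on top of this base (versions below are illustrative):

```
from metaflow import FlowSpec, conda_base, step


@conda_base(packages={'numpy': '1.26.4'}, python='3.10.14')
class CondaBaseExampleFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # resolved from the flow-level Conda environment
        self.total = float(np.arange(10).sum())
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    CondaBaseExampleFlow()
```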
946
1115
 
947
1116
  @typing.overload
948
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1117
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1118
+ """
1119
+ Specifies the event(s) that this flow depends on.
1120
+
1121
+ ```
1122
+ @trigger(event='foo')
1123
+ ```
1124
+ or
1125
+ ```
1126
+ @trigger(events=['foo', 'bar'])
1127
+ ```
1128
+
1129
+ Additionally, you can specify the parameter mappings
1130
+ to map event payload to Metaflow parameters for the flow.
1131
+ ```
1132
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1133
+ ```
1134
+ or
1135
+ ```
1136
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1137
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1138
+ ```
1139
+
1140
+ 'parameters' can also be a list of strings and tuples like so:
1141
+ ```
1142
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1143
+ ```
1144
+ This is equivalent to:
1145
+ ```
1146
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1147
+ ```
1148
+
1149
+
1150
+ Parameters
1151
+ ----------
1152
+ event : Union[str, Dict[str, Any]], optional, default None
1153
+ Event dependency for this flow.
1154
+ events : List[Union[str, Dict[str, Any]]], default []
1155
+ Events dependency for this flow.
1156
+ options : Dict[str, Any], default {}
1157
+ Backend-specific configuration for tuning eventing behavior.
1158
+ """
949
1159
  ...
950
1160
 
951
1161
  @typing.overload
952
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1162
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
953
1163
  ...
954
1164
 
955
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1165
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
956
1166
  """
957
- Specifies the Conda environment for the step.
1167
+ Specifies the event(s) that this flow depends on.
958
1168
 
959
- Information in this decorator will augment any
960
- attributes set in the `@conda_base` flow-level decorator. Hence,
961
- you can use `@conda_base` to set packages required by all
962
- steps and use `@conda` to specify step-specific overrides.
1169
+ ```
1170
+ @trigger(event='foo')
1171
+ ```
1172
+ or
1173
+ ```
1174
+ @trigger(events=['foo', 'bar'])
1175
+ ```
1176
+
1177
+ Additionally, you can specify the parameter mappings
1178
+ to map event payload to Metaflow parameters for the flow.
1179
+ ```
1180
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1181
+ ```
1182
+ or
1183
+ ```
1184
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1185
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1186
+ ```
1187
+
1188
+ 'parameters' can also be a list of strings and tuples like so:
1189
+ ```
1190
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1191
+ ```
1192
+ This is equivalent to:
1193
+ ```
1194
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1195
+ ```
963
1196
 
964
1197
 
965
1198
  Parameters
966
1199
  ----------
967
- packages : Dict[str, str], default {}
968
- Packages to use for this step. The key is the name of the package
969
- and the value is the version to use.
970
- libraries : Dict[str, str], default {}
971
- Supported for backward compatibility. When used with packages, packages will take precedence.
972
- python : str, optional, default None
973
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
974
- that the version used will correspond to the version of the Python interpreter used to start the run.
975
- disabled : bool, default False
976
- If set to True, disables @conda.
1200
+ event : Union[str, Dict[str, Any]], optional, default None
1201
+ Event dependency for this flow.
1202
+ events : List[Union[str, Dict[str, Any]]], default []
1203
+ Events dependency for this flow.
1204
+ options : Dict[str, Any], default {}
1205
+ Backend-specific configuration for tuning eventing behavior.
977
1206
  """
978
1207
  ...
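A sketch of an event-triggered deployment with a parameter mapping; the event name, field name, and deployment command are placeholders chosen for illustration:

```
from metaflow import FlowSpec, Parameter, step, trigger


# After deploying (e.g. `python trigger_example.py argo-workflows create`),
# the flow starts whenever a 'data_updated' event arrives, with the event's
# 'table' field mapped onto the `table` parameter.
@trigger(event={'name': 'data_updated', 'parameters': {'table': 'table'}})
class TriggerExampleFlow(FlowSpec):

    table = Parameter('table', default='raw_events')

    @step
    def start(self):
        print('Triggered for table', self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    TriggerExampleFlow()
```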
979
1208
 
@@ -1076,59 +1305,18 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1076
1305
  Parameters:
1077
1306
  ----------
1078
1307
 
1079
- type: str
1080
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1081
-
1082
- config: dict or Callable
1083
- Dictionary of configuration options for the datastore. The following keys are required:
1084
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1085
- - example: 's3://bucket-name/path/to/root'
1086
- - example: 'gs://bucket-name/path/to/root'
1087
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1088
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1089
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1090
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1091
- """
1092
- ...
1093
-
1094
- @typing.overload
1095
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1096
- """
1097
- Specifies the PyPI packages for all steps of the flow.
1098
-
1099
- Use `@pypi_base` to set common packages required by all
1100
- steps and use `@pypi` to specify step-specific overrides.
1101
-
1102
- Parameters
1103
- ----------
1104
- packages : Dict[str, str], default: {}
1105
- Packages to use for this flow. The key is the name of the package
1106
- and the value is the version to use.
1107
- python : str, optional, default: None
1108
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1109
- that the version used will correspond to the version of the Python interpreter used to start the run.
1110
- """
1111
- ...
1112
-
1113
- @typing.overload
1114
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1115
- ...
1116
-
1117
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1118
- """
1119
- Specifies the PyPI packages for all steps of the flow.
1120
-
1121
- Use `@pypi_base` to set common packages required by all
1122
- steps and use `@pypi` to specify step-specific overrides.
1308
+ type: str
1309
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure', or any other supported Metaflow datastore.
1123
1310
 
1124
- Parameters
1125
- ----------
1126
- packages : Dict[str, str], default: {}
1127
- Packages to use for this flow. The key is the name of the package
1128
- and the value is the version to use.
1129
- python : str, optional, default: None
1130
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1131
- that the version used will correspond to the version of the Python interpreter used to start the run.
1311
+ config: dict or Callable
1312
+ Dictionary of configuration options for the datastore. The following keys are required:
1313
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1314
+ - example: 's3://bucket-name/path/to/root'
1315
+ - example: 'gs://bucket-name/path/to/root'
1316
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1317
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1318
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1319
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1132
1320
  """
1133
1321
  ...
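The signature shown in the hunk header only exposes `f`, but the parameter list above describes `type` and `config` keyword arguments, so the following is a speculative sketch with a placeholder bucket:

```
from metaflow import FlowSpec, step
from metaflow import with_artifact_store  # assumed export, based on this stub


# Speculative: route large artifacts (checkpoints, models) to a dedicated S3 root.
@with_artifact_store(type='s3', config={'root': 's3://my-ml-artifacts/metaflow'})
class ArtifactStoreExampleFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    ArtifactStoreExampleFlow()
```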
1134
1322
 
@@ -1175,97 +1363,38 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1175
1363
  """
1176
1364
  ...
1177
1365
 
1178
- @typing.overload
1179
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1180
- """
1181
- Specifies the Conda environment for all steps of the flow.
1182
-
1183
- Use `@conda_base` to set common libraries required by all
1184
- steps and use `@conda` to specify step-specific additions.
1185
-
1186
-
1187
- Parameters
1188
- ----------
1189
- packages : Dict[str, str], default {}
1190
- Packages to use for this flow. The key is the name of the package
1191
- and the value is the version to use.
1192
- libraries : Dict[str, str], default {}
1193
- Supported for backward compatibility. When used with packages, packages will take precedence.
1194
- python : str, optional, default None
1195
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1196
- that the version used will correspond to the version of the Python interpreter used to start the run.
1197
- disabled : bool, default False
1198
- If set to True, disables Conda.
1199
- """
1200
- ...
1201
-
1202
- @typing.overload
1203
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1204
- ...
1205
-
1206
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1366
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1207
1367
  """
1208
- Specifies the Conda environment for all steps of the flow.
1368
+ Specifies what flows belong to the same project.
1209
1369
 
1210
- Use `@conda_base` to set common libraries required by all
1211
- steps and use `@conda` to specify step-specific additions.
1370
+ A project-specific namespace is created for all flows that
1371
+ use the same `@project(name)`.
1212
1372
 
1213
1373
 
1214
1374
  Parameters
1215
1375
  ----------
1216
- packages : Dict[str, str], default {}
1217
- Packages to use for this flow. The key is the name of the package
1218
- and the value is the version to use.
1219
- libraries : Dict[str, str], default {}
1220
- Supported for backward compatibility. When used with packages, packages will take precedence.
1221
- python : str, optional, default None
1222
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1223
- that the version used will correspond to the version of the Python interpreter used to start the run.
1224
- disabled : bool, default False
1225
- If set to True, disables Conda.
1226
- """
1227
- ...
1228
-
1229
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1230
- """
1231
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1232
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1376
+ name : str
1377
+ Project name. Make sure that the name is unique amongst all
1378
+ projects that use the same production scheduler. The name may
1379
+ contain only lowercase alphanumeric characters and underscores.
1233
1380
 
1381
+ branch : Optional[str], default None
1382
+ The branch to use. If not specified, the branch is set to
1383
+ `user.<username>` unless `production` is set to `True`. This can
1384
+ also be set on the command line using `--branch` as a top-level option.
1385
+ It is an error to specify `branch` in the decorator and on the command line.
1234
1386
 
1235
- Parameters
1236
- ----------
1237
- timeout : int
1238
- Time, in seconds before the task times out and fails. (Default: 3600)
1239
- poke_interval : int
1240
- Time in seconds that the job should wait in between each try. (Default: 60)
1241
- mode : str
1242
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1243
- exponential_backoff : bool
1244
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1245
- pool : str
1246
- The slot pool this task should run in;
1247
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
1248
- soft_fail : bool
1249
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1250
- name : str
1251
- Name of the sensor on Airflow
1252
- description : str
1253
- Description of sensor in the Airflow UI
1254
- external_dag_id : str
1255
- The dag_id that contains the task you want to wait for.
1256
- external_task_ids : List[str]
1257
- The list of task_ids that you want to wait for.
1258
- If None (default value) the sensor waits for the DAG. (Default: None)
1259
- allowed_states : List[str]
1260
- Iterable of allowed states, (Default: ['success'])
1261
- failed_states : List[str]
1262
- Iterable of failed or dis-allowed states. (Default: None)
1263
- execution_delta : datetime.timedelta
1264
- Time difference with the previous execution to look at;
1265
- the default is the same logical date as the current task or DAG. (Default: None)
1266
- check_existence: bool
1267
- Set to True to check if the external task exists or check if
1268
- the DAG to wait for exists. (Default: True)
1387
+ production : bool, default False
1388
+ Whether or not the branch is the production branch. This can also be set on the
1389
+ command line using `--production` as a top-level option. It is an error to specify
1390
+ `production` in the decorator and on the command line.
1391
+ The project branch name will be:
1392
+ - if `branch` is specified:
1393
+ - if `production` is True: `prod.<branch>`
1394
+ - if `production` is False: `test.<branch>`
1395
+ - if `branch` is not specified:
1396
+ - if `production` is True: `prod`
1397
+ - if `production` is False: `user.<username>`
1269
1398
  """
1270
1399
  ...
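A short sketch of namespacing a flow under a project; with the rules above, running this as user `alice` without `--production` deploys under the branch `user.alice`, `--branch staging` yields `test.staging`, and `--production` alone yields `prod`:

```
from metaflow import FlowSpec, project, step


@project(name='fraud_detection')
class ProjectExampleFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    ProjectExampleFlow()
```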
1271
1400
 
@@ -1320,41 +1449,6 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1320
1449
  """
1321
1450
  ...
1322
1451
 
1323
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1324
- """
1325
- Specifies what flows belong to the same project.
1326
-
1327
- A project-specific namespace is created for all flows that
1328
- use the same `@project(name)`.
1329
-
1330
-
1331
- Parameters
1332
- ----------
1333
- name : str
1334
- Project name. Make sure that the name is unique amongst all
1335
- projects that use the same production scheduler. The name may
1336
- contain only lowercase alphanumeric characters and underscores.
1337
-
1338
- branch : Optional[str], default None
1339
- The branch to use. If not specified, the branch is set to
1340
- `user.<username>` unless `production` is set to `True`. This can
1341
- also be set on the command line using `--branch` as a top-level option.
1342
- It is an error to specify `branch` in the decorator and on the command line.
1343
-
1344
- production : bool, default False
1345
- Whether or not the branch is the production branch. This can also be set on the
1346
- command line using `--production` as a top-level option. It is an error to specify
1347
- `production` in the decorator and on the command line.
1348
- The project branch name will be:
1349
- - if `branch` is specified:
1350
- - if `production` is True: `prod.<branch>`
1351
- - if `production` is False: `test.<branch>`
1352
- - if `branch` is not specified:
1353
- - if `production` is True: `prod`
1354
- - if `production` is False: `user.<username>`
1355
- """
1356
- ...
1357
-
1358
1452
  @typing.overload
1359
1453
  def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1360
1454
  """
@@ -1456,98 +1550,5 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1456
1550
  """
1457
1551
  ...
1458
1552
 
1459
- @typing.overload
1460
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1461
- """
1462
- Specifies the event(s) that this flow depends on.
1463
-
1464
- ```
1465
- @trigger(event='foo')
1466
- ```
1467
- or
1468
- ```
1469
- @trigger(events=['foo', 'bar'])
1470
- ```
1471
-
1472
- Additionally, you can specify the parameter mappings
1473
- to map event payload to Metaflow parameters for the flow.
1474
- ```
1475
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1476
- ```
1477
- or
1478
- ```
1479
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1480
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1481
- ```
1482
-
1483
- 'parameters' can also be a list of strings and tuples like so:
1484
- ```
1485
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1486
- ```
1487
- This is equivalent to:
1488
- ```
1489
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1490
- ```
1491
-
1492
-
1493
- Parameters
1494
- ----------
1495
- event : Union[str, Dict[str, Any]], optional, default None
1496
- Event dependency for this flow.
1497
- events : List[Union[str, Dict[str, Any]]], default []
1498
- Events dependency for this flow.
1499
- options : Dict[str, Any], default {}
1500
- Backend-specific configuration for tuning eventing behavior.
1501
- """
1502
- ...
1503
-
1504
- @typing.overload
1505
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1506
- ...
1507
-
1508
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1509
- """
1510
- Specifies the event(s) that this flow depends on.
1511
-
1512
- ```
1513
- @trigger(event='foo')
1514
- ```
1515
- or
1516
- ```
1517
- @trigger(events=['foo', 'bar'])
1518
- ```
1519
-
1520
- Additionally, you can specify the parameter mappings
1521
- to map event payload to Metaflow parameters for the flow.
1522
- ```
1523
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1524
- ```
1525
- or
1526
- ```
1527
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1528
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1529
- ```
1530
-
1531
- 'parameters' can also be a list of strings and tuples like so:
1532
- ```
1533
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1534
- ```
1535
- This is equivalent to:
1536
- ```
1537
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1538
- ```
1539
-
1540
-
1541
- Parameters
1542
- ----------
1543
- event : Union[str, Dict[str, Any]], optional, default None
1544
- Event dependency for this flow.
1545
- events : List[Union[str, Dict[str, Any]]], default []
1546
- Events dependency for this flow.
1547
- options : Dict[str, Any], default {}
1548
- Backend-specific configuration for tuning eventing behavior.
1549
- """
1550
- ...
1551
-
1552
1553
  pkg_name: str
1553
1554