ob-metaflow-stubs 6.0.4.5__py2.py3-none-any.whl → 6.0.4.6rc1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (249)
  1. metaflow-stubs/__init__.pyi +854 -854
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +1 -1
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +33 -33
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +2 -2
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +2 -2
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +1 -1
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +1 -1
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +1 -1
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +2 -2
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +3 -3
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +2 -2
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +2 -2
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  115. metaflow-stubs/multicore_utils.pyi +1 -1
  116. metaflow-stubs/ob_internal.pyi +1 -1
  117. metaflow-stubs/parameters.pyi +2 -2
  118. metaflow-stubs/plugins/__init__.pyi +9 -9
  119. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  120. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  121. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  122. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  123. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  124. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  125. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  126. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  128. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  129. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  130. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  131. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  132. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  133. metaflow-stubs/plugins/argo/exit_hooks.pyi +1 -1
  134. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  135. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  136. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  137. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  138. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  139. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  140. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  141. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  142. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  143. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  144. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  145. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  146. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  147. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  148. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  149. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  150. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  151. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  152. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  153. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  154. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  155. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  156. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  157. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  158. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  159. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  160. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  161. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  162. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  163. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  164. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  165. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  166. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  167. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  168. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  169. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  170. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  171. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  172. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  173. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  174. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  175. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  176. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  177. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  178. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  179. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  180. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  181. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  182. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/exit_hook/__init__.pyi +1 -1
  184. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +1 -1
  185. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  186. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  187. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  188. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  189. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  190. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  191. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  192. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  193. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  194. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  195. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  196. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  197. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  198. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  199. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  200. metaflow-stubs/plugins/ollama/__init__.pyi +1 -1
  201. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  202. metaflow-stubs/plugins/perimeters.pyi +1 -1
  203. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  204. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  205. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  206. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  207. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  208. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  209. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  210. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  211. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  212. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  213. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  214. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  215. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  216. metaflow-stubs/plugins/secrets/secrets_func.pyi +1 -1
  217. metaflow-stubs/plugins/secrets/secrets_spec.pyi +1 -1
  218. metaflow-stubs/plugins/secrets/utils.pyi +1 -1
  219. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  220. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  221. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  222. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  223. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  224. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  225. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  226. metaflow-stubs/profilers/__init__.pyi +1 -1
  227. metaflow-stubs/pylint_wrapper.pyi +1 -1
  228. metaflow-stubs/runner/__init__.pyi +1 -1
  229. metaflow-stubs/runner/deployer.pyi +29 -29
  230. metaflow-stubs/runner/deployer_impl.pyi +1 -1
  231. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  232. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  233. metaflow-stubs/runner/nbrun.pyi +1 -1
  234. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  235. metaflow-stubs/runner/utils.pyi +2 -2
  236. metaflow-stubs/system/__init__.pyi +1 -1
  237. metaflow-stubs/system/system_logger.pyi +1 -1
  238. metaflow-stubs/system/system_monitor.pyi +1 -1
  239. metaflow-stubs/tagging_util.pyi +1 -1
  240. metaflow-stubs/tuple_util.pyi +1 -1
  241. metaflow-stubs/user_configs/__init__.pyi +1 -1
  242. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  243. metaflow-stubs/user_configs/config_options.pyi +1 -1
  244. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  245. {ob_metaflow_stubs-6.0.4.5.dist-info → ob_metaflow_stubs-6.0.4.6rc1.dist-info}/METADATA +1 -1
  246. ob_metaflow_stubs-6.0.4.6rc1.dist-info/RECORD +249 -0
  247. ob_metaflow_stubs-6.0.4.5.dist-info/RECORD +0 -249
  248. {ob_metaflow_stubs-6.0.4.5.dist-info → ob_metaflow_stubs-6.0.4.6rc1.dist-info}/WHEEL +0 -0
  249. {ob_metaflow_stubs-6.0.4.5.dist-info → ob_metaflow_stubs-6.0.4.6rc1.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.21.2+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-07-16T21:13:36.500276 #
+ # Generated on 2025-07-16T22:24:21.902352 #
  ######################################################################################################

  from __future__ import annotations
@@ -37,8 +37,8 @@ from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDec
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
  from . import cards as cards
  from . import tuple_util as tuple_util
- from . import metaflow_git as metaflow_git
  from . import events as events
+ from . import metaflow_git as metaflow_git
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
@@ -156,6 +156,25 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

+ @typing.overload
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
  @typing.overload
  def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
@@ -176,61 +195,53 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Specifies that the step will success under all circumstances.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Specifies that the step will success under all circumstances.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

@@ -313,193 +324,116 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  """
  ...

- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
-
- > Examples
-
- **Usage: creating references of models from huggingface that may be loaded in downstream steps**
- ```python
- @huggingface_hub
- @step
- def pull_model_from_huggingface(self):
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
- # value of the function is a reference to the model in the backend storage.
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
-
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
- self.llama_model = current.huggingface_hub.snapshot_download(
- repo_id=self.model_id,
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
- )
- self.next(self.train)
- ```
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.

- **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
- ```python
- @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
- @step
- def pull_model_from_huggingface(self):
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
- ```
+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )

- ```python
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
- @step
- def finetune_model(self):
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
- # path_to_model will be /my-directory
- ```
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.

- ```python
- # Takes all the arguments passed to `snapshot_download`
- # except for `local_dir`
- @huggingface_hub(load=[
- {
- "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
- },
- {
- "repo_id": "myorg/mistral-lora",
- "repo_type": "model",
- },
- ])
- @step
- def finetune_model(self):
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
- # path_to_model will be /my-directory
- ```
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'


  Parameters
  ----------
- temp_dir_root : str, optional
- The root directory that will hold the temporary directory where objects will be downloaded.
-
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
- The list of repos (models/datasets) to load.
-
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
-
- - If repo (model/dataset) is not found in the datastore:
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
-
- - If repo is found in the datastore:
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ cache_update_policy: str
+ Cache update policy: "auto", "force", or "never".
+ force_cache_update: bool
+ Simple override for "force" cache update policy.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ circuit_breaker_config: dict
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+ timeout_config: dict
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
  """
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies a timeout for your step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies a timeout for your step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- This decorator is used to run vllm APIs as Metaflow task sidecars.
+ Enables checkpointing for a step.

- User code call
- --------------
- @vllm(
- model="...",
- ...
- )
+ > Examples

- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
-
- Valid model options
- -------------------
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
-
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
- If you need multiple models, you must create multiple @vllm decorators.
-
-
- Parameters
- ----------
- model: str
- HuggingFace model identifier to be served by vLLM.
- backend: str
- Determines where and how to run the vLLM process.
- openai_api_server: bool
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
- Default is False (uses native engine).
- Set to True for backward compatibility with existing code.
- debug: bool
- Whether to turn on verbose debugging logs.
- card_refresh_interval: int
- Interval in seconds for refreshing the vLLM status card.
- Only used when openai_api_server=True.
- max_retries: int
- Maximum number of retries checking for vLLM server startup.
- Only used when openai_api_server=True.
- retry_alert_frequency: int
- Frequency of alert logs for vLLM server startup retries.
- Only used when openai_api_server=True.
- engine_args : dict
- Additional keyword arguments to pass to the vLLM engine.
- For example, `tensor_parallel_size=2`.
- """
- ...
-
- @typing.overload
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Enables checkpointing for a step.
-
- > Examples
-
- - Saving Checkpoints
+ - Saving Checkpoints

  ```python
  @checkpoint
@@ -640,400 +574,220 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
640
574
  ...
641
575
 
642
576
  @typing.overload
643
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
577
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
644
578
  """
645
- Specifies secrets to be retrieved and injected as environment variables prior to
646
- the execution of a step.
579
+ Specifies environment variables to be set prior to the execution of a step.
647
580
 
648
581
 
649
582
  Parameters
650
583
  ----------
651
- sources : List[Union[str, Dict[str, Any]]], default: []
652
- List of secret specs, defining how the secrets are to be retrieved
653
- role : str, optional, default: None
654
- Role to use for fetching secrets
584
+ vars : Dict[str, str], default {}
585
+ Dictionary of environment variables to set.
655
586
  """
656
587
  ...
657
588
 
658
589
  @typing.overload
659
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
590
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
660
591
  ...
661
592
 
662
593
  @typing.overload
663
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
594
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
664
595
  ...
665
596
 
666
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
597
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
667
598
  """
668
- Specifies secrets to be retrieved and injected as environment variables prior to
669
- the execution of a step.
599
+ Specifies environment variables to be set prior to the execution of a step.
670
600
 
671
601
 
672
602
  Parameters
673
603
  ----------
674
- sources : List[Union[str, Dict[str, Any]]], default: []
675
- List of secret specs, defining how the secrets are to be retrieved
676
- role : str, optional, default: None
677
- Role to use for fetching secrets
604
+ vars : Dict[str, str], default {}
605
+ Dictionary of environment variables to set.
678
606
  """
679
607
  ...
680
608
 
681
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
609
+ @typing.overload
610
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
682
611
  """
683
- Specifies that this step should execute on Kubernetes.
684
-
685
-
686
- Parameters
687
- ----------
688
- cpu : int, default 1
689
- Number of CPUs required for this step. If `@resources` is
690
- also present, the maximum value from all decorators is used.
691
- memory : int, default 4096
692
- Memory size (in MB) required for this step. If
693
- `@resources` is also present, the maximum value from all decorators is
694
- used.
695
- disk : int, default 10240
696
- Disk size (in MB) required for this step. If
697
- `@resources` is also present, the maximum value from all decorators is
698
- used.
699
- image : str, optional, default None
700
- Docker image to use when launching on Kubernetes. If not specified, and
701
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
702
- not, a default Docker image mapping to the current version of Python is used.
703
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
704
- If given, the imagePullPolicy to be applied to the Docker image of the step.
705
- image_pull_secrets: List[str], default []
706
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
707
- Kubernetes image pull secrets to use when pulling container images
708
- in Kubernetes.
709
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
710
- Kubernetes service account to use when launching pod in Kubernetes.
711
- secrets : List[str], optional, default None
712
- Kubernetes secrets to use when launching pod in Kubernetes. These
713
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
714
- in Metaflow configuration.
715
- node_selector: Union[Dict[str,str], str], optional, default None
716
- Kubernetes node selector(s) to apply to the pod running the task.
717
- Can be passed in as a comma separated string of values e.g.
718
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
719
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
720
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
721
- Kubernetes namespace to use when launching pod in Kubernetes.
722
- gpu : int, optional, default None
723
- Number of GPUs required for this step. A value of zero implies that
724
- the scheduled node should not have GPUs.
725
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
726
- The vendor of the GPUs to be used for this step.
727
- tolerations : List[str], default []
728
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
729
- Kubernetes tolerations to use when launching pod in Kubernetes.
730
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
731
- Kubernetes labels to use when launching pod in Kubernetes.
732
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
733
- Kubernetes annotations to use when launching pod in Kubernetes.
734
- use_tmpfs : bool, default False
735
- This enables an explicit tmpfs mount for this step.
736
- tmpfs_tempdir : bool, default True
737
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
738
- tmpfs_size : int, optional, default: None
739
- The value for the size (in MiB) of the tmpfs mount for this step.
740
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
741
- memory allocated for this step.
742
- tmpfs_path : str, optional, default /metaflow_temp
743
- Path to tmpfs mount for this step.
744
- persistent_volume_claims : Dict[str, str], optional, default None
745
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
746
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
747
- shared_memory: int, optional
748
- Shared memory size (in MiB) required for this step
749
- port: int, optional
750
- Port number to specify in the Kubernetes job object
751
- compute_pool : str, optional, default None
752
- Compute pool to be used for for this step.
753
- If not specified, any accessible compute pool within the perimeter is used.
754
- hostname_resolution_timeout: int, default 10 * 60
755
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
756
- Only applicable when @parallel is used.
757
- qos: str, default: Burstable
758
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
759
-
760
- security_context: Dict[str, Any], optional, default None
761
- Container security context. Applies to the task container. Allows the following keys:
762
- - privileged: bool, optional, default None
763
- - allow_privilege_escalation: bool, optional, default None
764
- - run_as_user: int, optional, default None
765
- - run_as_group: int, optional, default None
766
- - run_as_non_root: bool, optional, default None
612
+ Internal decorator to support Fast bakery
767
613
  """
768
614
  ...
769
615
 
770
616
  @typing.overload
771
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
617
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
618
+ ...
619
+
620
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
772
621
  """
773
- Specifies the Conda environment for the step.
622
+ Internal decorator to support Fast bakery
623
+ """
624
+ ...
625
+
626
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
627
+ """
628
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
774
629
 
775
- Information in this decorator will augment any
776
- attributes set in the `@conda_base` flow-level decorator. Hence,
777
- you can use `@conda_base` to set packages required by all
778
- steps and use `@conda` to specify step-specific overrides.
630
+ User code call
631
+ --------------
632
+ @vllm(
633
+ model="...",
634
+ ...
635
+ )
636
+
637
+ Valid backend options
638
+ ---------------------
639
+ - 'local': Run as a separate process on the local task machine.
640
+
641
+ Valid model options
642
+ -------------------
643
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
644
+
645
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
646
+ If you need multiple models, you must create multiple @vllm decorators.
779
647
 
780
648
 
781
649
  Parameters
782
650
  ----------
783
- packages : Dict[str, str], default {}
784
- Packages to use for this step. The key is the name of the package
785
- and the value is the version to use.
786
- libraries : Dict[str, str], default {}
787
- Supported for backward compatibility. When used with packages, packages will take precedence.
788
- python : str, optional, default None
789
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
790
- that the version used will correspond to the version of the Python interpreter used to start the run.
791
- disabled : bool, default False
792
- If set to True, disables @conda.
651
+ model: str
652
+ HuggingFace model identifier to be served by vLLM.
653
+ backend: str
654
+ Determines where and how to run the vLLM process.
655
+ openai_api_server: bool
656
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
657
+ Default is False (uses native engine).
658
+ Set to True for backward compatibility with existing code.
659
+ debug: bool
660
+ Whether to turn on verbose debugging logs.
661
+ card_refresh_interval: int
662
+ Interval in seconds for refreshing the vLLM status card.
663
+ Only used when openai_api_server=True.
664
+ max_retries: int
665
+ Maximum number of retries checking for vLLM server startup.
666
+ Only used when openai_api_server=True.
667
+ retry_alert_frequency: int
668
+ Frequency of alert logs for vLLM server startup retries.
669
+ Only used when openai_api_server=True.
670
+ engine_args : dict
671
+ Additional keyword arguments to pass to the vLLM engine.
672
+ For example, `tensor_parallel_size=2`.
793
673
  """
794
674
  ...
795
675
 
796
- @typing.overload
797
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
798
- ...
799
-
800
- @typing.overload
801
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
802
- ...
803
-
804
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
676
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
805
677
  """
806
- Specifies the Conda environment for the step.
807
-
808
- Information in this decorator will augment any
809
- attributes set in the `@conda_base` flow-level decorator. Hence,
810
- you can use `@conda_base` to set packages required by all
811
- steps and use `@conda` to specify step-specific overrides.
678
+ Specifies that this step should execute on DGX cloud.
812
679
 
813
680
 
814
681
  Parameters
815
682
  ----------
816
- packages : Dict[str, str], default {}
817
- Packages to use for this step. The key is the name of the package
818
- and the value is the version to use.
819
- libraries : Dict[str, str], default {}
820
- Supported for backward compatibility. When used with packages, packages will take precedence.
821
- python : str, optional, default None
822
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
823
- that the version used will correspond to the version of the Python interpreter used to start the run.
824
- disabled : bool, default False
825
- If set to True, disables @conda.
683
+ gpu : int
684
+ Number of GPUs to use.
685
+ gpu_type : str
686
+ Type of Nvidia GPU to use.
826
687
  """
827
688
  ...
828
689
 
829
- @typing.overload
830
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
690
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
831
691
  """
832
- Enables loading / saving of models within a step.
833
-
834
- > Examples
835
- - Saving Models
836
- ```python
837
- @model
838
- @step
839
- def train(self):
840
- # current.model.save returns a dictionary reference to the model saved
841
- self.my_model = current.model.save(
842
- path_to_my_model,
843
- label="my_model",
844
- metadata={
845
- "epochs": 10,
846
- "batch-size": 32,
847
- "learning-rate": 0.001,
848
- }
849
- )
850
- self.next(self.test)
851
-
852
- @model(load="my_model")
853
- @step
854
- def test(self):
855
- # `current.model.loaded` returns a dictionary of the loaded models
856
- # where the key is the name of the artifact and the value is the path to the model
857
- print(os.listdir(current.model.loaded["my_model"]))
858
- self.next(self.end)
859
- ```
860
-
861
- - Loading models
862
- ```python
863
- @step
864
- def train(self):
865
- # current.model.load returns the path to the model loaded
866
- checkpoint_path = current.model.load(
867
- self.checkpoint_key,
868
- )
869
- model_path = current.model.load(
870
- self.model,
871
- )
872
- self.next(self.test)
873
- ```
692
+ Specifies that this step should execute on DGX cloud.
874
693
 
875
694
 
876
695
  Parameters
877
696
  ----------
878
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
879
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
880
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
881
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
882
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
883
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
884
-
885
- temp_dir_root : str, default: None
886
- The root directory under which `current.model.loaded` will store loaded models
697
+ gpu : int
698
+ Number of GPUs to use.
699
+ gpu_type : str
700
+ Type of Nvidia GPU to use.
701
+ queue_timeout : int
702
+ Time to keep the job in NVCF's queue.
887
703
  """
888
704
  ...
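`@nvidia` takes the same GPU arguments plus a queue timeout; a sketch with placeholder values:

```python
from metaflow import FlowSpec, step, nvidia  # assumes the extension exposes `nvidia` at the top level

class NvcfInferenceFlow(FlowSpec):

    @nvidia(gpu=1, gpu_type="A100", queue_timeout=3600)  # wait up to an hour in NVCF's queue
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NvcfInferenceFlow()
```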
889
705
 
890
706
  @typing.overload
891
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
892
- ...
893
-
894
- @typing.overload
895
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
896
- ...
897
-
898
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
707
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
899
708
  """
900
- Enables loading / saving of models within a step.
901
-
902
- > Examples
903
- - Saving Models
904
- ```python
905
- @model
906
- @step
907
- def train(self):
908
- # current.model.save returns a dictionary reference to the model saved
909
- self.my_model = current.model.save(
910
- path_to_my_model,
911
- label="my_model",
912
- metadata={
913
- "epochs": 10,
914
- "batch-size": 32,
915
- "learning-rate": 0.001,
916
- }
917
- )
918
- self.next(self.test)
919
-
920
- @model(load="my_model")
921
- @step
922
- def test(self):
923
- # `current.model.loaded` returns a dictionary of the loaded models
924
- # where the key is the name of the artifact and the value is the path to the model
925
- print(os.listdir(current.model.loaded["my_model"]))
926
- self.next(self.end)
927
- ```
709
+ Creates a human-readable report, a Metaflow Card, after this step completes.
928
710
 
929
- - Loading models
930
- ```python
931
- @step
932
- def train(self):
933
- # current.model.load returns the path to the model loaded
934
- checkpoint_path = current.model.load(
935
- self.checkpoint_key,
936
- )
937
- model_path = current.model.load(
938
- self.model,
939
- )
940
- self.next(self.test)
941
- ```
711
+ Note that you may add multiple `@card` decorators in a step with different parameters.
942
712
 
943
713
 
944
714
  Parameters
945
715
  ----------
946
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
947
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
948
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
949
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
950
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
951
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
952
-
953
- temp_dir_root : str, default: None
954
- The root directory under which `current.model.loaded` will store loaded models
716
+ type : str, default 'default'
717
+ Card type.
718
+ id : str, optional, default None
719
+ If multiple cards are present, use this id to identify this card.
720
+ options : Dict[str, Any], default {}
721
+ Options passed to the card. The contents depend on the card type.
722
+ timeout : int, default 45
723
+ Interrupt reporting if it takes more than this many seconds.
955
724
  """
956
725
  ...
957
726
 
958
727
  @typing.overload
959
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
960
- """
961
- Internal decorator to support Fast bakery
962
- """
728
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
963
729
  ...
964
730
 
965
731
  @typing.overload
966
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
967
- ...
968
-
969
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
970
- """
971
- Internal decorator to support Fast bakery
972
- """
732
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
973
733
  ...
974
734
 
975
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
735
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
976
736
  """
977
- Specifies that this step should execute on DGX cloud.
737
+ Creates a human-readable report, a Metaflow Card, after this step completes.
738
+
739
+ Note that you may add multiple `@card` decorators in a step with different parameters.
978
740
 
979
741
 
980
742
  Parameters
981
743
  ----------
982
- gpu : int
983
- Number of GPUs to use.
984
- gpu_type : str
985
- Type of Nvidia GPU to use.
744
+ type : str, default 'default'
745
+ Card type.
746
+ id : str, optional, default None
747
+ If multiple cards are present, use this id to identify this card.
748
+ options : Dict[str, Any], default {}
749
+ Options passed to the card. The contents depend on the card type.
750
+ timeout : int, default 45
751
+ Interrupt reporting if it takes more than this many seconds.
986
752
  """
987
753
  ...
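A sketch of stacking multiple `@card` decorators on one step, as the docstring above allows; the `blank` card type and the `Markdown` component are standard Metaflow card building blocks, but the card id and content here are placeholders.

```python
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown

class CardFlow(FlowSpec):

    @card                               # default card
    @card(type="blank", id="notes")     # second card, addressed by id
    @step
    def start(self):
        # Append components to the card identified by `id`.
        current.card["notes"].append(Markdown("# Training notes"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardFlow()
```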
988
754
 
989
755
  @typing.overload
990
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
756
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
991
757
  """
992
- Specifies the PyPI packages for the step.
993
-
994
- Information in this decorator will augment any
995
- attributes set in the `@pyi_base` flow-level decorator. Hence,
996
- you can use `@pypi_base` to set packages required by all
997
- steps and use `@pypi` to specify step-specific overrides.
758
+ Specifies secrets to be retrieved and injected as environment variables prior to
759
+ the execution of a step.
998
760
 
999
761
 
1000
762
  Parameters
1001
763
  ----------
1002
- packages : Dict[str, str], default: {}
1003
- Packages to use for this step. The key is the name of the package
1004
- and the value is the version to use.
1005
- python : str, optional, default: None
1006
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1007
- that the version used will correspond to the version of the Python interpreter used to start the run.
764
+ sources : List[Union[str, Dict[str, Any]]], default: []
765
+ List of secret specs, defining how the secrets are to be retrieved
766
+ role : str, optional, default: None
767
+ Role to use for fetching secrets
1008
768
  """
1009
769
  ...
1010
770
 
1011
771
  @typing.overload
1012
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
772
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1013
773
  ...
1014
774
 
1015
775
  @typing.overload
1016
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
776
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1017
777
  ...
1018
778
 
1019
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
779
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1020
780
  """
1021
- Specifies the PyPI packages for the step.
1022
-
1023
- Information in this decorator will augment any
1024
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1025
- you can use `@pypi_base` to set packages required by all
1026
- steps and use `@pypi` to specify step-specific overrides.
781
+ Specifies secrets to be retrieved and injected as environment variables prior to
782
+ the execution of a step.
1027
783
 
1028
784
 
1029
785
  Parameters
1030
786
  ----------
1031
- packages : Dict[str, str], default: {}
1032
- Packages to use for this step. The key is the name of the package
1033
- and the value is the version to use.
1034
- python : str, optional, default: None
1035
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1036
- that the version used will correspond to the version of the Python interpreter used to start the run.
787
+ sources : List[Union[str, Dict[str, Any]]], default: []
788
+ List of secret specs, defining how the secrets are to be retrieved
789
+ role : str, optional, default: None
790
+ Role to use for fetching secrets
1037
791
  """
1038
792
  ...
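For example, with a hypothetical secret named `db-credentials` in the configured secrets backend, its keys are injected as environment variables before the step body runs:

```python
import os
from metaflow import FlowSpec, step, secrets

class SecretsFlow(FlowSpec):

    @secrets(sources=["db-credentials"])  # placeholder secret name
    @step
    def start(self):
        # Keys stored in the secret are exposed as environment variables.
        print("user:", os.environ.get("DB_USER"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsFlow()
```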
1039
793
 
@@ -1092,317 +846,454 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1092
846
  """
1093
847
  ...
1094
848
 
1095
- @typing.overload
1096
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1097
- """
1098
- Specifies environment variables to be set prior to the execution of a step.
1099
-
1100
-
1101
- Parameters
1102
- ----------
1103
- vars : Dict[str, str], default {}
1104
- Dictionary of environment variables to set.
1105
- """
1106
- ...
1107
-
1108
- @typing.overload
1109
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1110
- ...
1111
-
1112
- @typing.overload
1113
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1114
- ...
1115
-
1116
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1117
- """
1118
- Specifies environment variables to be set prior to the execution of a step.
1119
-
1120
-
1121
- Parameters
1122
- ----------
1123
- vars : Dict[str, str], default {}
1124
- Dictionary of environment variables to set.
1125
- """
1126
- ...
1127
-
1128
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
849
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1129
850
  """
1130
- Specifies that this step should execute on DGX cloud.
851
+ Specifies that this step should execute on Kubernetes.
1131
852
 
1132
853
 
1133
854
  Parameters
1134
855
  ----------
1135
- gpu : int
1136
- Number of GPUs to use.
1137
- gpu_type : str
1138
- Type of Nvidia GPU to use.
1139
- queue_timeout : int
1140
- Time to keep the job in NVCF's queue.
1141
- """
1142
- ...
1143
-
1144
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1145
- """
1146
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
1147
-
1148
- User code call
1149
- --------------
1150
- @ollama(
1151
- models=[...],
1152
- ...
1153
- )
856
+ cpu : int, default 1
857
+ Number of CPUs required for this step. If `@resources` is
858
+ also present, the maximum value from all decorators is used.
859
+ memory : int, default 4096
860
+ Memory size (in MB) required for this step. If
861
+ `@resources` is also present, the maximum value from all decorators is
862
+ used.
863
+ disk : int, default 10240
864
+ Disk size (in MB) required for this step. If
865
+ `@resources` is also present, the maximum value from all decorators is
866
+ used.
867
+ image : str, optional, default None
868
+ Docker image to use when launching on Kubernetes. If not specified, and
869
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
870
+ not, a default Docker image mapping to the current version of Python is used.
871
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
872
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
873
+ image_pull_secrets: List[str], default []
874
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
875
+ Kubernetes image pull secrets to use when pulling container images
876
+ in Kubernetes.
877
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
878
+ Kubernetes service account to use when launching pod in Kubernetes.
879
+ secrets : List[str], optional, default None
880
+ Kubernetes secrets to use when launching pod in Kubernetes. These
881
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
882
+ in Metaflow configuration.
883
+ node_selector: Union[Dict[str,str], str], optional, default None
884
+ Kubernetes node selector(s) to apply to the pod running the task.
885
+ Can be passed in as a comma separated string of values e.g.
886
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
887
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
888
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
889
+ Kubernetes namespace to use when launching pod in Kubernetes.
890
+ gpu : int, optional, default None
891
+ Number of GPUs required for this step. A value of zero implies that
892
+ the scheduled node should not have GPUs.
893
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
894
+ The vendor of the GPUs to be used for this step.
895
+ tolerations : List[str], default []
896
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
897
+ Kubernetes tolerations to use when launching pod in Kubernetes.
898
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
899
+ Kubernetes labels to use when launching pod in Kubernetes.
900
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
901
+ Kubernetes annotations to use when launching pod in Kubernetes.
902
+ use_tmpfs : bool, default False
903
+ This enables an explicit tmpfs mount for this step.
904
+ tmpfs_tempdir : bool, default True
905
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
906
+ tmpfs_size : int, optional, default: None
907
+ The value for the size (in MiB) of the tmpfs mount for this step.
908
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
909
+ memory allocated for this step.
910
+ tmpfs_path : str, optional, default /metaflow_temp
911
+ Path to tmpfs mount for this step.
912
+ persistent_volume_claims : Dict[str, str], optional, default None
913
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
914
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
915
+ shared_memory: int, optional
916
+ Shared memory size (in MiB) required for this step
917
+ port: int, optional
918
+ Port number to specify in the Kubernetes job object
919
+ compute_pool : str, optional, default None
920
+ Compute pool to be used for this step.
921
+ If not specified, any accessible compute pool within the perimeter is used.
922
+ hostname_resolution_timeout: int, default 10 * 60
923
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
924
+ Only applicable when @parallel is used.
925
+ qos: str, default: Burstable
926
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1154
927
 
1155
- Valid backend options
1156
- ---------------------
1157
- - 'local': Run as a separate process on the local task machine.
1158
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1159
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
928
+ security_context: Dict[str, Any], optional, default None
929
+ Container security context. Applies to the task container. Allows the following keys:
930
+ - privileged: bool, optional, default None
931
+ - allow_privilege_escalation: bool, optional, default None
932
+ - run_as_user: int, optional, default None
933
+ - run_as_group: int, optional, default None
934
+ - run_as_non_root: bool, optional, default None
935
+ """
936
+ ...
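A sketch combining a few of the options above; the image, node selector, PVC name, and sizes are placeholders for whatever your cluster actually provides.

```python
from metaflow import FlowSpec, step, kubernetes

class K8sFlow(FlowSpec):

    @kubernetes(
        cpu=2,
        memory=8192,                                     # MB
        image="registry.example.com/ml:latest",          # placeholder image
        node_selector="kubernetes.io/arch=amd64",
        use_tmpfs=True,
        tmpfs_size=2048,                                 # MiB
        persistent_volume_claims={"my-pvc": "/data"},    # placeholder PVC
        qos="Burstable",
    )
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sFlow()
```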
937
+
938
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
939
+ """
940
+ Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
1160
941
 
1161
- Valid model options
1162
- -------------------
1163
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
942
+ > Examples
943
+
944
+ **Usage: creating references to models from the Hugging Face Hub that may be loaded in downstream steps**
945
+ ```python
946
+ @huggingface_hub
947
+ @step
948
+ def pull_model_from_huggingface(self):
949
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
950
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
951
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
952
+ # value of the function is a reference to the model in the backend storage.
953
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
954
+
955
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
956
+ self.llama_model = current.huggingface_hub.snapshot_download(
957
+ repo_id=self.model_id,
958
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
959
+ )
960
+ self.next(self.train)
961
+ ```
962
+
963
+ **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
964
+ ```python
965
+ @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
966
+ @step
967
+ def pull_model_from_huggingface(self):
968
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
969
+ ```
970
+
971
+ ```python
972
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
973
+ @step
974
+ def finetune_model(self):
975
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
976
+ # path_to_model will be /my-directory
977
+ ```
978
+
979
+ ```python
980
+ # Takes all the arguments passed to `snapshot_download`
981
+ # except for `local_dir`
982
+ @huggingface_hub(load=[
983
+ {
984
+ "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
985
+ },
986
+ {
987
+ "repo_id": "myorg/mistral-lora",
988
+ "repo_type": "model",
989
+ },
990
+ ])
991
+ @step
992
+ def finetune_model(self):
993
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
994
+ # path_to_model points to the local download location (a temporary directory by default)
995
+ ```
1164
996
 
1165
997
 
1166
998
  Parameters
1167
999
  ----------
1168
- models: list[str]
1169
- List of Ollama containers running models in sidecars.
1170
- backend: str
1171
- Determines where and how to run the Ollama process.
1172
- force_pull: bool
1173
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1174
- cache_update_policy: str
1175
- Cache update policy: "auto", "force", or "never".
1176
- force_cache_update: bool
1177
- Simple override for "force" cache update policy.
1178
- debug: bool
1179
- Whether to turn on verbose debugging logs.
1180
- circuit_breaker_config: dict
1181
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1182
- timeout_config: dict
1183
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1000
+ temp_dir_root : str, optional
1001
+ The root directory that will hold the temporary directory where objects will be downloaded.
1002
+
1003
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
1004
+ The list of repos (models/datasets) to load.
1005
+
1006
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
1007
+
1008
+ - If repo (model/dataset) is not found in the datastore:
1009
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
1010
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
1011
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
1012
+
1013
+ - If repo is found in the datastore:
1014
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
1184
1015
  """
1185
1016
  ...
1186
1017
 
1187
1018
  @typing.overload
1188
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1019
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1189
1020
  """
1190
- Decorator prototype for all step decorators. This function gets specialized
1191
- and imported for all decorators types by _import_plugin_decorators().
1021
+ Specifies the Conda environment for the step.
1022
+
1023
+ Information in this decorator will augment any
1024
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1025
+ you can use `@conda_base` to set packages required by all
1026
+ steps and use `@conda` to specify step-specific overrides.
1027
+
1028
+
1029
+ Parameters
1030
+ ----------
1031
+ packages : Dict[str, str], default {}
1032
+ Packages to use for this step. The key is the name of the package
1033
+ and the value is the version to use.
1034
+ libraries : Dict[str, str], default {}
1035
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1036
+ python : str, optional, default None
1037
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1038
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1039
+ disabled : bool, default False
1040
+ If set to True, disables @conda.
1192
1041
  """
1193
1042
  ...
1194
1043
 
1195
1044
  @typing.overload
1196
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1045
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1197
1046
  ...
1198
1047
 
1199
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1048
+ @typing.overload
1049
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1050
+ ...
1051
+
1052
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1200
1053
  """
1201
- Decorator prototype for all step decorators. This function gets specialized
1202
- and imported for all decorators types by _import_plugin_decorators().
1054
+ Specifies the Conda environment for the step.
1055
+
1056
+ Information in this decorator will augment any
1057
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1058
+ you can use `@conda_base` to set packages required by all
1059
+ steps and use `@conda` to specify step-specific overrides.
1060
+
1061
+
1062
+ Parameters
1063
+ ----------
1064
+ packages : Dict[str, str], default {}
1065
+ Packages to use for this step. The key is the name of the package
1066
+ and the value is the version to use.
1067
+ libraries : Dict[str, str], default {}
1068
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1069
+ python : str, optional, default None
1070
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1071
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1072
+ disabled : bool, default False
1073
+ If set to True, disables @conda.
1203
1074
  """
1204
1075
  ...
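A sketch of combining `@conda_base` with a step-level `@conda` override, using placeholder package pins:

```python
from metaflow import FlowSpec, step, conda, conda_base

@conda_base(python="3.10.12", packages={"numpy": "1.26.4"})  # shared by all steps
class CondaFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.train)

    @conda(packages={"scikit-learn": "1.4.2"})  # step-specific addition
    @step
    def train(self):
        import sklearn  # resolved from this step's Conda environment
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaFlow()
```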
1205
1076
 
1206
1077
  @typing.overload
1207
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1078
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1208
1079
  """
1209
- Specifies that the step will success under all circumstances.
1080
+ Specifies the PyPI packages for the step.
1210
1081
 
1211
- The decorator will create an optional artifact, specified by `var`, which
1212
- contains the exception raised. You can use it to detect the presence
1213
- of errors, indicating that all happy-path artifacts produced by the step
1214
- are missing.
1082
+ Information in this decorator will augment any
1083
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
1084
+ you can use `@pypi_base` to set packages required by all
1085
+ steps and use `@pypi` to specify step-specific overrides.
1215
1086
 
1216
1087
 
1217
1088
  Parameters
1218
1089
  ----------
1219
- var : str, optional, default None
1220
- Name of the artifact in which to store the caught exception.
1221
- If not specified, the exception is not stored.
1222
- print_exception : bool, default True
1223
- Determines whether or not the exception is printed to
1224
- stdout when caught.
1090
+ packages : Dict[str, str], default: {}
1091
+ Packages to use for this step. The key is the name of the package
1092
+ and the value is the version to use.
1093
+ python : str, optional, default: None
1094
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1095
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1225
1096
  """
1226
1097
  ...
1227
1098
 
1228
1099
  @typing.overload
1229
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1100
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1230
1101
  ...
1231
1102
 
1232
1103
  @typing.overload
1233
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1104
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1234
1105
  ...
1235
1106
 
1236
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1107
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1237
1108
  """
1238
- Specifies that the step will success under all circumstances.
1109
+ Specifies the PyPI packages for the step.
1239
1110
 
1240
- The decorator will create an optional artifact, specified by `var`, which
1241
- contains the exception raised. You can use it to detect the presence
1242
- of errors, indicating that all happy-path artifacts produced by the step
1243
- are missing.
1111
+ Information in this decorator will augment any
1112
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
1113
+ you can use `@pypi_base` to set packages required by all
1114
+ steps and use `@pypi` to specify step-specific overrides.
1244
1115
 
1245
1116
 
1246
1117
  Parameters
1247
1118
  ----------
1248
- var : str, optional, default None
1249
- Name of the artifact in which to store the caught exception.
1250
- If not specified, the exception is not stored.
1251
- print_exception : bool, default True
1252
- Determines whether or not the exception is printed to
1253
- stdout when caught.
1119
+ packages : Dict[str, str], default: {}
1120
+ Packages to use for this step. The key is the name of the package
1121
+ and the value is the version to use.
1122
+ python : str, optional, default: None
1123
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1124
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1254
1125
  """
1255
1126
  ...
1256
1127
 
1257
1128
  @typing.overload
1258
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1129
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1259
1130
  """
1260
- Specifies the flow(s) that this flow depends on.
1131
+ Enables loading / saving of models within a step.
1261
1132
 
1262
- ```
1263
- @trigger_on_finish(flow='FooFlow')
1264
- ```
1265
- or
1266
- ```
1267
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1268
- ```
1269
- This decorator respects the @project decorator and triggers the flow
1270
- when upstream runs within the same namespace complete successfully
1133
+ > Examples
1134
+ - Saving Models
1135
+ ```python
1136
+ @model
1137
+ @step
1138
+ def train(self):
1139
+ # current.model.save returns a dictionary reference to the model saved
1140
+ self.my_model = current.model.save(
1141
+ path_to_my_model,
1142
+ label="my_model",
1143
+ metadata={
1144
+ "epochs": 10,
1145
+ "batch-size": 32,
1146
+ "learning-rate": 0.001,
1147
+ }
1148
+ )
1149
+ self.next(self.test)
1271
1150
 
1272
- Additionally, you can specify project aware upstream flow dependencies
1273
- by specifying the fully qualified project_flow_name.
1274
- ```
1275
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1276
- ```
1277
- or
1278
- ```
1279
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1151
+ @model(load="my_model")
1152
+ @step
1153
+ def test(self):
1154
+ # `current.model.loaded` returns a dictionary of the loaded models
1155
+ # where the key is the name of the artifact and the value is the path to the model
1156
+ print(os.listdir(current.model.loaded["my_model"]))
1157
+ self.next(self.end)
1280
1158
  ```
1281
1159
 
1282
- You can also specify just the project or project branch (other values will be
1283
- inferred from the current project or project branch):
1284
- ```
1285
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1160
+ - Loading models
1161
+ ```python
1162
+ @step
1163
+ def train(self):
1164
+ # current.model.load returns the path to the model loaded
1165
+ checkpoint_path = current.model.load(
1166
+ self.checkpoint_key,
1167
+ )
1168
+ model_path = current.model.load(
1169
+ self.model,
1170
+ )
1171
+ self.next(self.test)
1286
1172
  ```
1287
1173
 
1288
- Note that `branch` is typically one of:
1289
- - `prod`
1290
- - `user.bob`
1291
- - `test.my_experiment`
1292
- - `prod.staging`
1293
-
1294
1174
 
1295
1175
  Parameters
1296
1176
  ----------
1297
- flow : Union[str, Dict[str, str]], optional, default None
1298
- Upstream flow dependency for this flow.
1299
- flows : List[Union[str, Dict[str, str]]], default []
1300
- Upstream flow dependencies for this flow.
1301
- options : Dict[str, Any], default {}
1302
- Backend-specific configuration for tuning eventing behavior.
1177
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1178
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1179
+ The artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1180
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
1181
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1182
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1183
+
1184
+ temp_dir_root : str, default: None
1185
+ The root directory under which `current.model.loaded` will store loaded models
1303
1186
  """
1304
1187
  ...
1305
1188
 
1306
1189
  @typing.overload
1307
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1190
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1308
1191
  ...
1309
1192
 
1310
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1193
+ @typing.overload
1194
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1195
+ ...
1196
+
1197
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1311
1198
  """
1312
- Specifies the flow(s) that this flow depends on.
1313
-
1314
- ```
1315
- @trigger_on_finish(flow='FooFlow')
1316
- ```
1317
- or
1318
- ```
1319
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1320
- ```
1321
- This decorator respects the @project decorator and triggers the flow
1322
- when upstream runs within the same namespace complete successfully
1199
+ Enables loading / saving of models within a step.
1323
1200
 
1324
- Additionally, you can specify project aware upstream flow dependencies
1325
- by specifying the fully qualified project_flow_name.
1326
- ```
1327
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1328
- ```
1329
- or
1330
- ```
1331
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1332
- ```
1201
+ > Examples
1202
+ - Saving Models
1203
+ ```python
1204
+ @model
1205
+ @step
1206
+ def train(self):
1207
+ # current.model.save returns a dictionary reference to the model saved
1208
+ self.my_model = current.model.save(
1209
+ path_to_my_model,
1210
+ label="my_model",
1211
+ metadata={
1212
+ "epochs": 10,
1213
+ "batch-size": 32,
1214
+ "learning-rate": 0.001,
1215
+ }
1216
+ )
1217
+ self.next(self.test)
1333
1218
 
1334
- You can also specify just the project or project branch (other values will be
1335
- inferred from the current project or project branch):
1336
- ```
1337
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1219
+ @model(load="my_model")
1220
+ @step
1221
+ def test(self):
1222
+ # `current.model.loaded` returns a dictionary of the loaded models
1223
+ # where the key is the name of the artifact and the value is the path to the model
1224
+ print(os.listdir(current.model.loaded["my_model"]))
1225
+ self.next(self.end)
1338
1226
  ```
1339
1227
 
1340
- Note that `branch` is typically one of:
1341
- - `prod`
1342
- - `user.bob`
1343
- - `test.my_experiment`
1344
- - `prod.staging`
1228
+ - Loading models
1229
+ ```python
1230
+ @step
1231
+ def train(self):
1232
+ # current.model.load returns the path to the model loaded
1233
+ checkpoint_path = current.model.load(
1234
+ self.checkpoint_key,
1235
+ )
1236
+ model_path = current.model.load(
1237
+ self.model,
1238
+ )
1239
+ self.next(self.test)
1240
+ ```
1345
1241
 
1346
1242
 
1347
1243
  Parameters
1348
1244
  ----------
1349
- flow : Union[str, Dict[str, str]], optional, default None
1350
- Upstream flow dependency for this flow.
1351
- flows : List[Union[str, Dict[str, str]]], default []
1352
- Upstream flow dependencies for this flow.
1353
- options : Dict[str, Any], default {}
1354
- Backend-specific configuration for tuning eventing behavior.
1355
- """
1356
- ...
1357
-
1358
- @typing.overload
1359
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1360
- """
1361
- Specifies the times when the flow should be run when running on a
1362
- production scheduler.
1363
-
1245
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1246
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1247
+ The artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1248
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
1249
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1250
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1364
1251
 
1365
- Parameters
1366
- ----------
1367
- hourly : bool, default False
1368
- Run the workflow hourly.
1369
- daily : bool, default True
1370
- Run the workflow daily.
1371
- weekly : bool, default False
1372
- Run the workflow weekly.
1373
- cron : str, optional, default None
1374
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1375
- specified by this expression.
1376
- timezone : str, optional, default None
1377
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1378
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1252
+ temp_dir_root : str, default: None
1253
+ The root directory under which `current.model.loaded` will store loaded models
1379
1254
  """
1380
1255
  ...
1381
1256
 
1382
- @typing.overload
1383
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1384
- ...
1385
-
1386
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1257
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1387
1258
  """
1388
- Specifies the times when the flow should be run when running on a
1389
- production scheduler.
1259
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1260
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1261
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1262
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
1263
+ starts only after all sensors finish.
1390
1264
 
1391
1265
 
1392
1266
  Parameters
1393
1267
  ----------
1394
- hourly : bool, default False
1395
- Run the workflow hourly.
1396
- daily : bool, default True
1397
- Run the workflow daily.
1398
- weekly : bool, default False
1399
- Run the workflow weekly.
1400
- cron : str, optional, default None
1401
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1402
- specified by this expression.
1403
- timezone : str, optional, default None
1404
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1405
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1268
+ timeout : int
1269
+ Time, in seconds before the task times out and fails. (Default: 3600)
1270
+ poke_interval : int
1271
+ Time in seconds that the job should wait in between each try. (Default: 60)
1272
+ mode : str
1273
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1274
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1275
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1276
+ pool : str
1277
+ The slot pool this task should run in;
1278
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1279
+ soft_fail : bool
1280
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1281
+ name : str
1282
+ Name of the sensor on Airflow
1283
+ description : str
1284
+ Description of sensor in the Airflow UI
1285
+ bucket_key : Union[str, List[str]]
1286
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1287
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1288
+ bucket_name : str
1289
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1290
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1291
+ wildcard_match : bool
1292
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1293
+ aws_conn_id : str
1294
+ A reference to the S3 connection on Airflow. (Default: None)
1295
+ verify : bool
1296
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
1406
1297
  """
1407
1298
  ...
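A sketch of gating a flow on an S3 key when compiling with `airflow create`; the sensor name and bucket path are placeholders, and only a subset of the keyword arguments is shown (the stub lists them all, with the defaults noted in the docstring above).

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor

@airflow_s3_key_sensor(
    name="wait_for_daily_dump",                              # placeholder sensor name
    bucket_key="s3://example-bucket/exports/latest.parquet",  # placeholder key
    wildcard_match=False,
    timeout=3600,
    poke_interval=60,
    mode="poke",
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()
```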
1408
1299
 
@@ -1497,153 +1388,26 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1497
1388
  "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1498
1389
  },
1499
1390
  }):
1500
- load_model(
1501
- task.data.model_ref,
1502
- "test-models"
1503
- )
1504
- ```
1505
- Parameters:
1506
- ----------
1507
-
1508
- type: str
1509
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1510
-
1511
- config: dict or Callable
1512
- Dictionary of configuration options for the datastore. The following keys are required:
1513
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1514
- - example: 's3://bucket-name/path/to/root'
1515
- - example: 'gs://bucket-name/path/to/root'
1516
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1517
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1518
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1519
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1520
- """
1521
- ...
1522
-
1523
- @typing.overload
1524
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1525
- """
1526
- Specifies the PyPI packages for all steps of the flow.
1527
-
1528
- Use `@pypi_base` to set common packages required by all
1529
- steps and use `@pypi` to specify step-specific overrides.
1530
-
1531
- Parameters
1532
- ----------
1533
- packages : Dict[str, str], default: {}
1534
- Packages to use for this flow. The key is the name of the package
1535
- and the value is the version to use.
1536
- python : str, optional, default: None
1537
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1538
- that the version used will correspond to the version of the Python interpreter used to start the run.
1539
- """
1540
- ...
1541
-
1542
- @typing.overload
1543
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1544
- ...
1545
-
1546
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1547
- """
1548
- Specifies the PyPI packages for all steps of the flow.
1549
-
1550
- Use `@pypi_base` to set common packages required by all
1551
- steps and use `@pypi` to specify step-specific overrides.
1552
-
1553
- Parameters
1554
- ----------
1555
- packages : Dict[str, str], default: {}
1556
- Packages to use for this flow. The key is the name of the package
1557
- and the value is the version to use.
1558
- python : str, optional, default: None
1559
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1560
- that the version used will correspond to the version of the Python interpreter used to start the run.
1561
- """
1562
- ...
1563
-
1564
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1565
- """
1566
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1567
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1568
-
1569
-
1570
- Parameters
1571
- ----------
1572
- timeout : int
1573
- Time, in seconds before the task times out and fails. (Default: 3600)
1574
- poke_interval : int
1575
- Time in seconds that the job should wait in between each try. (Default: 60)
1576
- mode : str
1577
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1578
- exponential_backoff : bool
1579
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1580
- pool : str
1581
- the slot pool this task should run in,
1582
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1583
- soft_fail : bool
1584
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1585
- name : str
1586
- Name of the sensor on Airflow
1587
- description : str
1588
- Description of sensor in the Airflow UI
1589
- external_dag_id : str
1590
- The dag_id that contains the task you want to wait for.
1591
- external_task_ids : List[str]
1592
- The list of task_ids that you want to wait for.
1593
- If None (default value) the sensor waits for the DAG. (Default: None)
1594
- allowed_states : List[str]
1595
- Iterable of allowed states, (Default: ['success'])
1596
- failed_states : List[str]
1597
- Iterable of failed or dis-allowed states. (Default: None)
1598
- execution_delta : datetime.timedelta
1599
- time difference with the previous execution to look at,
1600
- the default is the same logical date as the current task or DAG. (Default: None)
1601
- check_existence: bool
1602
- Set to True to check if the external task exists or check if
1603
- the DAG to wait for exists. (Default: True)
1604
- """
1605
- ...
1606
-
1607
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1608
- """
1609
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1610
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1611
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1612
- added as a flow decorator. Adding more than one ensures that the `start` step
1613
- starts only after all sensors finish.
1614
-
1615
-
1616
- Parameters
1617
- ----------
1618
- timeout : int
1619
- Time, in seconds, before the task times out and fails. (Default: 3600)
1620
- poke_interval : int
1621
- Time in seconds that the job should wait in between each try. (Default: 60)
1622
- mode : str
1623
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1624
- exponential_backoff : bool
1625
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1626
- pool : str
1627
- The slot pool this task should run in;
1628
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
1629
- soft_fail : bool
1630
- Set to True to mark the task as SKIPPED on failure. (Default: False)
1631
- name : str
1632
- Name of the sensor on Airflow
1633
- description : str
1634
- Description of sensor in the Airflow UI
1635
- bucket_key : Union[str, List[str]]
1636
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1637
- When it's specified as a full s3:// url, please leave `bucket_name` as None.
1638
- bucket_name : str
1639
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1640
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1641
- wildcard_match : bool
1642
- Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1643
- aws_conn_id : str
1644
- A reference to the S3 connection on Airflow. (Default: None)
1645
- verify : bool
1646
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
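For orientation, a minimal sketch of attaching this sensor to a flow; it assumes the decorator is importable from the top-level `metaflow` package (as this stub file suggests), that the flow is deployed with `airflow create`, and that the bucket and key are placeholders while omitted arguments keep the defaults documented above.

```
from metaflow import FlowSpec, step, airflow_s3_key_sensor

# Sketch only: block the `start` step until the given S3 key exists.
# Since a full s3:// URL is used, `bucket_name` stays at its default of None.
@airflow_s3_key_sensor(
    bucket_key="s3://example-bucket/daily/input.parquet",
    name="wait_for_daily_input",
    description="Wait for the daily input file before starting",
)
class S3SensorFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3SensorFlow()
```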
1391
+ load_model(
1392
+ task.data.model_ref,
1393
+ "test-models"
1394
+ )
1395
+ ```
1396
+ Parameters
1397
+ ----------
1398
+
1399
+ type : str
1400
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure', or any other supported Metaflow datastore.
1401
+
1402
+ config : dict or Callable
1403
+ Dictionary of configuration options for the datastore. The following keys are supported:
1404
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1405
+ - example: 's3://bucket-name/path/to/root'
1406
+ - example: 'gs://bucket-name/path/to/root'
1407
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1408
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1409
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1410
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1647
1411
  """
1648
1412
  ...
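The `+` lines above document a datastore configuration, but the decorator they belong to starts outside this hunk, so only the shape of the config can be illustrated. A minimal sketch, with every value a placeholder and the variable name an assumption:

```
# Sketch only: a datastore configuration using the keys documented above.
# The surrounding decorator is not shown in this hunk; only the mapping is illustrated.
EXAMPLE_DATASTORE_CONFIG = {
    "type": "s3",  # one of 's3', 'gcs', 'azure', or another supported datastore
    "config": {
        # Required: root path, in the format the chosen datastore expects.
        "root": "s3://example-bucket/path/to/root",
        # Optional, s3-only settings documented above:
        "role_arn": "arn:aws:iam::111122223333:role/example-role",
    },
}
```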
1649
1413
 
@@ -1698,6 +1462,57 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1698
1462
  """
1699
1463
  ...
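Only the closing lines of the `@conda_base` stub appear in this hunk; as a reminder of typical usage, a minimal sketch (the package name, version, and Python version are placeholders):

```
from metaflow import FlowSpec, step, conda_base

# Sketch only: pin the interpreter and a shared package for every step.
@conda_base(python="3.10.4", packages={"pandas": "2.1.4"})
class CondaBaseFlow(FlowSpec):
    @step
    def start(self):
        import pandas as pd  # resolved from the conda environment above
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.rows)


if __name__ == "__main__":
    CondaBaseFlow()
```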
1700
1464
 
1465
+ @typing.overload
1466
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1467
+ """
1468
+ Specifies the times when the flow should be run when running on a
1469
+ production scheduler.
1470
+
1471
+
1472
+ Parameters
1473
+ ----------
1474
+ hourly : bool, default False
1475
+ Run the workflow hourly.
1476
+ daily : bool, default True
1477
+ Run the workflow daily.
1478
+ weekly : bool, default False
1479
+ Run the workflow weekly.
1480
+ cron : str, optional, default None
1481
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1482
+ specified by this expression.
1483
+ timezone : str, optional, default None
1484
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1485
+ which accept timezones in [IANA format](https://nodatime.org/TimeZones).
1486
+ """
1487
+ ...
1488
+
1489
+ @typing.overload
1490
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1491
+ ...
1492
+
1493
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1494
+ """
1495
+ Specifies the times when the flow should be run when running on a
1496
+ production scheduler.
1497
+
1498
+
1499
+ Parameters
1500
+ ----------
1501
+ hourly : bool, default False
1502
+ Run the workflow hourly.
1503
+ daily : bool, default True
1504
+ Run the workflow daily.
1505
+ weekly : bool, default False
1506
+ Run the workflow weekly.
1507
+ cron : str, optional, default None
1508
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1509
+ specified by this expression.
1510
+ timezone : str, optional, default None
1511
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1512
+ which accept timezones in [IANA format](https://nodatime.org/TimeZones).
1513
+ """
1514
+ ...
1515
+
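A minimal sketch of the decorator documented above; the schedule only takes effect once the flow is deployed to a production scheduler (for example Argo Workflows or AWS Step Functions), and `daily=True` is shown simply because it is the documented default.

```
from metaflow import FlowSpec, step, schedule

# Sketch only: run the deployed flow once a day. A cron expression could be
# passed instead via `cron=...`, in the format the target scheduler expects.
@schedule(daily=True)
class NightlyFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```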
1701
1516
  @typing.overload
1702
1517
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1703
1518
  """
@@ -1826,5 +1641,190 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
1826
1641
  """
1827
1642
  ...
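The hunk header above references `@project`, whose docstring body falls outside this excerpt; a minimal usage sketch (the project name is a placeholder):

```
from metaflow import FlowSpec, step, project

# Sketch only: group deployments of this flow under one project namespace,
# so production and branch variants deployed later do not collide.
@project(name="demo_project")
class ProjectFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectFlow()
```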
1828
1643
 
1644
+ @typing.overload
1645
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1646
+ """
1647
+ Specifies the flow(s) that this flow depends on.
1648
+
1649
+ ```
1650
+ @trigger_on_finish(flow='FooFlow')
1651
+ ```
1652
+ or
1653
+ ```
1654
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1655
+ ```
1656
+ This decorator respects the @project decorator and triggers the flow
1657
+ when upstream runs within the same namespace complete successfully.
1658
+
1659
+ Additionally, you can specify project-aware upstream flow dependencies
1660
+ by specifying the fully qualified project_flow_name.
1661
+ ```
1662
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1663
+ ```
1664
+ or
1665
+ ```
1666
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1667
+ ```
1668
+
1669
+ You can also specify just the project or project branch (other values will be
1670
+ inferred from the current project or project branch):
1671
+ ```
1672
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1673
+ ```
1674
+
1675
+ Note that `branch` is typically one of:
1676
+ - `prod`
1677
+ - `user.bob`
1678
+ - `test.my_experiment`
1679
+ - `prod.staging`
1680
+
1681
+
1682
+ Parameters
1683
+ ----------
1684
+ flow : Union[str, Dict[str, str]], optional, default None
1685
+ Upstream flow dependency for this flow.
1686
+ flows : List[Union[str, Dict[str, str]]], default []
1687
+ Upstream flow dependencies for this flow.
1688
+ options : Dict[str, Any], default {}
1689
+ Backend-specific configuration for tuning eventing behavior.
1690
+ """
1691
+ ...
1692
+
1693
+ @typing.overload
1694
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1695
+ ...
1696
+
1697
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1698
+ """
1699
+ Specifies the flow(s) that this flow depends on.
1700
+
1701
+ ```
1702
+ @trigger_on_finish(flow='FooFlow')
1703
+ ```
1704
+ or
1705
+ ```
1706
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1707
+ ```
1708
+ This decorator respects the @project decorator and triggers the flow
1709
+ when upstream runs within the same namespace complete successfully.
1710
+
1711
+ Additionally, you can specify project-aware upstream flow dependencies
1712
+ by specifying the fully qualified project_flow_name.
1713
+ ```
1714
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1715
+ ```
1716
+ or
1717
+ ```
1718
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1719
+ ```
1720
+
1721
+ You can also specify just the project or project branch (other values will be
1722
+ inferred from the current project or project branch):
1723
+ ```
1724
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1725
+ ```
1726
+
1727
+ Note that `branch` is typically one of:
1728
+ - `prod`
1729
+ - `user.bob`
1730
+ - `test.my_experiment`
1731
+ - `prod.staging`
1732
+
1733
+
1734
+ Parameters
1735
+ ----------
1736
+ flow : Union[str, Dict[str, str]], optional, default None
1737
+ Upstream flow dependency for this flow.
1738
+ flows : List[Union[str, Dict[str, str]]], default []
1739
+ Upstream flow dependencies for this flow.
1740
+ options : Dict[str, Any], default {}
1741
+ Backend-specific configuration for tuning eventing behavior.
1742
+ """
1743
+ ...
1744
+
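A minimal sketch of the dependency documented above; `FooFlow` is a placeholder, and the trigger only fires on event-capable deployments (for example Argo Workflows), not on local runs.

```
from metaflow import FlowSpec, step, trigger_on_finish

# Sketch only: start this flow whenever a run of FooFlow in the same
# (project-aware) namespace finishes successfully.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```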
1745
+ @typing.overload
1746
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1747
+ """
1748
+ Specifies the PyPI packages for all steps of the flow.
1749
+
1750
+ Use `@pypi_base` to set common packages required by all
1751
+ steps and use `@pypi` to specify step-specific overrides.
1752
+
1753
+ Parameters
1754
+ ----------
1755
+ packages : Dict[str, str], default: {}
1756
+ Packages to use for this flow. The key is the name of the package
1757
+ and the value is the version to use.
1758
+ python : str, optional, default: None
1759
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1760
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1761
+ """
1762
+ ...
1763
+
1764
+ @typing.overload
1765
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1766
+ ...
1767
+
1768
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1769
+ """
1770
+ Specifies the PyPI packages for all steps of the flow.
1771
+
1772
+ Use `@pypi_base` to set common packages required by all
1773
+ steps and use `@pypi` to specify step-specific overrides.
1774
+
1775
+ Parameters
1776
+ ----------
1777
+ packages : Dict[str, str], default: {}
1778
+ Packages to use for this flow. The key is the name of the package
1779
+ and the value is the version to use.
1780
+ python : str, optional, default: None
1781
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1782
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1783
+ """
1784
+ ...
1785
+
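A minimal sketch of the flow-level PyPI environment described above; the package and version are placeholders, and leaving `python` unset keeps the interpreter that launched the run.

```
from metaflow import FlowSpec, step, pypi_base

# Sketch only: make one shared PyPI dependency available to every step.
@pypi_base(packages={"requests": "2.31.0"})
class PypiBaseFlow(FlowSpec):
    @step
    def start(self):
        import requests  # importable because of @pypi_base above
        self.ok_code = requests.codes.ok
        self.next(self.end)

    @step
    def end(self):
        print(self.ok_code)


if __name__ == "__main__":
    PypiBaseFlow()
```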
1786
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1787
+ """
1788
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1789
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one ensures that the `start` step starts only after all sensors finish.
1790
+
1791
+
1792
+ Parameters
1793
+ ----------
1794
+ timeout : int
1795
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1796
+ poke_interval : int
1797
+ Time in seconds that the job should wait in between each try. (Default: 60)
1798
+ mode : str
1799
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1800
+ exponential_backoff : bool
1801
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1802
+ pool : str
1803
+ The slot pool this task should run in;
1804
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1805
+ soft_fail : bool
1806
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
1807
+ name : str
1808
+ Name of the sensor on Airflow
1809
+ description : str
1810
+ Description of sensor in the Airflow UI
1811
+ external_dag_id : str
1812
+ The dag_id that contains the task you want to wait for.
1813
+ external_task_ids : List[str]
1814
+ The list of task_ids that you want to wait for.
1815
+ If None (default value) the sensor waits for the DAG. (Default: None)
1816
+ allowed_states : List[str]
1817
+ Iterable of allowed states. (Default: ['success'])
1818
+ failed_states : List[str]
1819
+ Iterable of failed or dis-allowed states. (Default: None)
1820
+ execution_delta : datetime.timedelta
1821
+ Time difference with the previous execution to look at;
1822
+ the default is the same logical date as the current task or DAG. (Default: None)
1823
+ check_existence : bool
1824
+ Set to True to check if the external task exists or check if
1825
+ the DAG to wait for exists. (Default: True)
1826
+ """
1827
+ ...
1828
+
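A minimal sketch of the sensor documented above; it assumes the decorator is importable from the top-level `metaflow` package, that the flow is compiled with `airflow create`, that the DAG and task ids are placeholders, and that omitted arguments keep the documented defaults.

```
from metaflow import FlowSpec, step, airflow_external_task_sensor

# Sketch only: hold the `start` step until `load_data` in `upstream_dag`
# has succeeded on the Airflow deployment this flow is compiled for.
@airflow_external_task_sensor(
    name="wait_for_upstream",
    description="Wait for upstream_dag.load_data to succeed",
    external_dag_id="upstream_dag",
    external_task_ids=["load_data"],
)
class ExternalSensorFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ExternalSensorFlow()
```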
1829
1829
  pkg_name: str
1830
1830