ob-metaflow-stubs 6.0.10.17__py2.py3-none-any.whl → 6.0.10.18__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ob-metaflow-stubs might be problematic. See the package registry's advisory page for more details.

Files changed (266)
  1. metaflow-stubs/__init__.pyi +1050 -1050
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +75 -75
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +3 -3
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +5 -5
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +5 -5
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/cards/__init__.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/cards/hf_hub_card.pyi +4 -4
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +3 -3
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  64. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  65. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +3 -3
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +4 -4
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +3 -3
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +3 -3
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +4 -4
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -5
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +3 -3
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  113. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +3 -3
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  116. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  117. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  118. metaflow-stubs/multicore_utils.pyi +2 -2
  119. metaflow-stubs/ob_internal.pyi +2 -2
  120. metaflow-stubs/packaging_sys/__init__.pyi +6 -6
  121. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  122. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  123. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  124. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  125. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  126. metaflow-stubs/parameters.pyi +3 -3
  127. metaflow-stubs/plugins/__init__.pyi +12 -12
  128. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  134. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  135. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  137. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  138. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  139. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  140. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  141. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  142. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  143. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  144. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  145. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  146. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  148. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  149. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  150. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  152. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  154. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  157. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  158. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  159. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  161. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  162. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  163. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  164. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  165. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  166. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  171. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  173. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/components.pyi +17 -5
  175. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  176. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +3 -3
  177. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  178. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  179. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  180. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  181. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  182. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  183. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  184. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  186. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  187. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  188. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  189. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  190. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  191. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  193. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  194. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  195. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  196. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  199. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  200. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  201. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  202. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  207. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  208. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  209. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  210. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  211. metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
  212. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  213. metaflow-stubs/plugins/parsers.pyi +2 -2
  214. metaflow-stubs/plugins/perimeters.pyi +2 -2
  215. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  217. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  218. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  219. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  220. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  221. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  222. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  223. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  224. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  226. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  227. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  228. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  229. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  230. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  231. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  233. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  234. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  235. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  236. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  237. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  238. metaflow-stubs/profilers/__init__.pyi +2 -2
  239. metaflow-stubs/pylint_wrapper.pyi +2 -2
  240. metaflow-stubs/runner/__init__.pyi +2 -2
  241. metaflow-stubs/runner/deployer.pyi +33 -33
  242. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  243. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  244. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  245. metaflow-stubs/runner/nbrun.pyi +2 -2
  246. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  247. metaflow-stubs/runner/utils.pyi +3 -3
  248. metaflow-stubs/system/__init__.pyi +2 -2
  249. metaflow-stubs/system/system_logger.pyi +3 -3
  250. metaflow-stubs/system/system_monitor.pyi +2 -2
  251. metaflow-stubs/tagging_util.pyi +2 -2
  252. metaflow-stubs/tuple_util.pyi +2 -2
  253. metaflow-stubs/user_configs/__init__.pyi +2 -2
  254. metaflow-stubs/user_configs/config_options.pyi +3 -3
  255. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  256. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  257. metaflow-stubs/user_decorators/common.pyi +2 -2
  258. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  259. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  260. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  261. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  262. {ob_metaflow_stubs-6.0.10.17.dist-info → ob_metaflow_stubs-6.0.10.18.dist-info}/METADATA +1 -1
  263. ob_metaflow_stubs-6.0.10.18.dist-info/RECORD +266 -0
  264. ob_metaflow_stubs-6.0.10.17.dist-info/RECORD +0 -266
  265. {ob_metaflow_stubs-6.0.10.17.dist-info → ob_metaflow_stubs-6.0.10.18.dist-info}/WHEEL +0 -0
  266. {ob_metaflow_stubs-6.0.10.17.dist-info → ob_metaflow_stubs-6.0.10.18.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.18.11.1+obcheckpoint(0.2.8);ob(v1) #
4
- # Generated on 2025-10-13T21:06:57.979951 #
3
+ # MF version: 2.18.12.1+obcheckpoint(0.2.8);ob(v1) #
4
+ # Generated on 2025-10-20T19:13:33.388213 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import typing
12
11
  import datetime
12
+ import typing
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -39,19 +39,19 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
39
39
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
40
40
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
41
41
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
42
+ from . import cards as cards
42
43
  from . import metaflow_git as metaflow_git
43
44
  from . import tuple_util as tuple_util
44
- from . import cards as cards
45
45
  from . import events as events
46
46
  from . import runner as runner
47
47
  from . import plugins as plugins
48
48
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
49
49
  from . import includefile as includefile
50
50
  from .includefile import IncludeFile as IncludeFile
51
- from .plugins.parsers import yaml_parser as yaml_parser
52
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
53
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
54
51
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
52
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
53
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
54
+ from .plugins.parsers import yaml_parser as yaml_parser
55
55
  from . import client as client
56
56
  from .client.core import namespace as namespace
57
57
  from .client.core import get_namespace as get_namespace
@@ -169,57 +169,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
169
169
  """
170
170
  ...
171
171
 
172
- @typing.overload
173
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
174
- """
175
- Specifies the PyPI packages for the step.
176
-
177
- Information in this decorator will augment any
178
- attributes set in the `@pyi_base` flow-level decorator. Hence,
179
- you can use `@pypi_base` to set packages required by all
180
- steps and use `@pypi` to specify step-specific overrides.
181
-
182
-
183
- Parameters
184
- ----------
185
- packages : Dict[str, str], default: {}
186
- Packages to use for this step. The key is the name of the package
187
- and the value is the version to use.
188
- python : str, optional, default: None
189
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
190
- that the version used will correspond to the version of the Python interpreter used to start the run.
191
- """
192
- ...
193
-
194
- @typing.overload
195
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
196
- ...
197
-
198
- @typing.overload
199
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
200
- ...
201
-
202
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
203
- """
204
- Specifies the PyPI packages for the step.
205
-
206
- Information in this decorator will augment any
207
- attributes set in the `@pyi_base` flow-level decorator. Hence,
208
- you can use `@pypi_base` to set packages required by all
209
- steps and use `@pypi` to specify step-specific overrides.
210
-
211
-
212
- Parameters
213
- ----------
214
- packages : Dict[str, str], default: {}
215
- Packages to use for this step. The key is the name of the package
216
- and the value is the version to use.
217
- python : str, optional, default: None
218
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
219
- that the version used will correspond to the version of the Python interpreter used to start the run.
220
- """
221
- ...
222
-
223
172
  def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
224
173
  """
225
174
  `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
@@ -284,6 +233,65 @@ def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode
284
233
  """
285
234
  ...
286
235
 
236
+ @typing.overload
237
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
238
+ """
239
+ Specifies the Conda environment for the step.
240
+
241
+ Information in this decorator will augment any
242
+ attributes set in the `@conda_base` flow-level decorator. Hence,
243
+ you can use `@conda_base` to set packages required by all
244
+ steps and use `@conda` to specify step-specific overrides.
245
+
246
+
247
+ Parameters
248
+ ----------
249
+ packages : Dict[str, str], default {}
250
+ Packages to use for this step. The key is the name of the package
251
+ and the value is the version to use.
252
+ libraries : Dict[str, str], default {}
253
+ Supported for backward compatibility. When used with packages, packages will take precedence.
254
+ python : str, optional, default None
255
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
256
+ that the version used will correspond to the version of the Python interpreter used to start the run.
257
+ disabled : bool, default False
258
+ If set to True, disables @conda.
259
+ """
260
+ ...
261
+
262
+ @typing.overload
263
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
264
+ ...
265
+
266
+ @typing.overload
267
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
268
+ ...
269
+
270
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
271
+ """
272
+ Specifies the Conda environment for the step.
273
+
274
+ Information in this decorator will augment any
275
+ attributes set in the `@conda_base` flow-level decorator. Hence,
276
+ you can use `@conda_base` to set packages required by all
277
+ steps and use `@conda` to specify step-specific overrides.
278
+
279
+
280
+ Parameters
281
+ ----------
282
+ packages : Dict[str, str], default {}
283
+ Packages to use for this step. The key is the name of the package
284
+ and the value is the version to use.
285
+ libraries : Dict[str, str], default {}
286
+ Supported for backward compatibility. When used with packages, packages will take precedence.
287
+ python : str, optional, default None
288
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
289
+ that the version used will correspond to the version of the Python interpreter used to start the run.
290
+ disabled : bool, default False
291
+ If set to True, disables @conda.
292
+ """
293
+ ...
294
+
287
295
  @typing.overload
288
296
  def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
289
297
  """
@@ -432,61 +440,51 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
432
440
  ...
433
441
 
434
442
  @typing.overload
435
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
443
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
436
444
  """
437
- Specifies the Conda environment for the step.
445
+ Creates a human-readable report, a Metaflow Card, after this step completes.
438
446
 
439
- Information in this decorator will augment any
440
- attributes set in the `@conda_base` flow-level decorator. Hence,
441
- you can use `@conda_base` to set packages required by all
442
- steps and use `@conda` to specify step-specific overrides.
447
+ Note that you may add multiple `@card` decorators in a step with different parameters.
443
448
 
444
449
 
445
450
  Parameters
446
451
  ----------
447
- packages : Dict[str, str], default {}
448
- Packages to use for this step. The key is the name of the package
449
- and the value is the version to use.
450
- libraries : Dict[str, str], default {}
451
- Supported for backward compatibility. When used with packages, packages will take precedence.
452
- python : str, optional, default None
453
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
454
- that the version used will correspond to the version of the Python interpreter used to start the run.
455
- disabled : bool, default False
456
- If set to True, disables @conda.
452
+ type : str, default 'default'
453
+ Card type.
454
+ id : str, optional, default None
455
+ If multiple cards are present, use this id to identify this card.
456
+ options : Dict[str, Any], default {}
457
+ Options passed to the card. The contents depend on the card type.
458
+ timeout : int, default 45
459
+ Interrupt reporting if it takes more than this many seconds.
457
460
  """
458
461
  ...
459
462
 
460
463
  @typing.overload
461
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
464
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
462
465
  ...
463
466
 
464
467
  @typing.overload
465
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
468
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
466
469
  ...
467
470
 
468
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
471
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
469
472
  """
470
- Specifies the Conda environment for the step.
473
+ Creates a human-readable report, a Metaflow Card, after this step completes.
471
474
 
472
- Information in this decorator will augment any
473
- attributes set in the `@conda_base` flow-level decorator. Hence,
474
- you can use `@conda_base` to set packages required by all
475
- steps and use `@conda` to specify step-specific overrides.
475
+ Note that you may add multiple `@card` decorators in a step with different parameters.
476
476
 
477
477
 
478
478
  Parameters
479
479
  ----------
480
- packages : Dict[str, str], default {}
481
- Packages to use for this step. The key is the name of the package
482
- and the value is the version to use.
483
- libraries : Dict[str, str], default {}
484
- Supported for backward compatibility. When used with packages, packages will take precedence.
485
- python : str, optional, default None
486
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
487
- that the version used will correspond to the version of the Python interpreter used to start the run.
488
- disabled : bool, default False
489
- If set to True, disables @conda.
480
+ type : str, default 'default'
481
+ Card type.
482
+ id : str, optional, default None
483
+ If multiple cards are present, use this id to identify this card.
484
+ options : Dict[str, Any], default {}
485
+ Options passed to the card. The contents depend on the card type.
486
+ timeout : int, default 45
487
+ Interrupt reporting if it takes more than this many seconds.
490
488
  """
491
489
  ...
492
490
 
@@ -508,133 +506,72 @@ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepF
508
506
  ...
509
507
 
510
508
  @typing.overload
511
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
509
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
512
510
  """
513
- Specifies secrets to be retrieved and injected as environment variables prior to
514
- the execution of a step.
515
-
516
-
517
- Parameters
518
- ----------
519
- sources : List[Union[str, Dict[str, Any]]], default: []
520
- List of secret specs, defining how the secrets are to be retrieved
521
- role : str, optional, default: None
522
- Role to use for fetching secrets
511
+ Decorator prototype for all step decorators. This function gets specialized
512
+ and imported for all decorators types by _import_plugin_decorators().
523
513
  """
524
514
  ...
525
515
 
526
516
  @typing.overload
527
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
528
- ...
529
-
530
- @typing.overload
531
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
517
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
532
518
  ...
533
519
 
534
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
520
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
535
521
  """
536
- Specifies secrets to be retrieved and injected as environment variables prior to
537
- the execution of a step.
538
-
539
-
540
- Parameters
541
- ----------
542
- sources : List[Union[str, Dict[str, Any]]], default: []
543
- List of secret specs, defining how the secrets are to be retrieved
544
- role : str, optional, default: None
545
- Role to use for fetching secrets
522
+ Decorator prototype for all step decorators. This function gets specialized
523
+ and imported for all decorators types by _import_plugin_decorators().
546
524
  """
547
525
  ...
548
526
 
549
527
  @typing.overload
550
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
528
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
551
529
  """
552
- Specifies a timeout for your step.
553
-
554
- This decorator is useful if this step may hang indefinitely.
555
-
556
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
557
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
558
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
530
+ Specifies the PyPI packages for the step.
559
531
 
560
- Note that all the values specified in parameters are added together so if you specify
561
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
532
+ Information in this decorator will augment any
533
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
534
+ you can use `@pypi_base` to set packages required by all
535
+ steps and use `@pypi` to specify step-specific overrides.
562
536
 
563
537
 
564
538
  Parameters
565
539
  ----------
566
- seconds : int, default 0
567
- Number of seconds to wait prior to timing out.
568
- minutes : int, default 0
569
- Number of minutes to wait prior to timing out.
570
- hours : int, default 0
571
- Number of hours to wait prior to timing out.
540
+ packages : Dict[str, str], default: {}
541
+ Packages to use for this step. The key is the name of the package
542
+ and the value is the version to use.
543
+ python : str, optional, default: None
544
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
545
+ that the version used will correspond to the version of the Python interpreter used to start the run.
572
546
  """
573
547
  ...
574
548
 
575
549
  @typing.overload
576
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
550
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
577
551
  ...
578
552
 
579
553
  @typing.overload
580
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
554
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
581
555
  ...
582
556
 
583
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
557
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
584
558
  """
585
- Specifies a timeout for your step.
559
+ Specifies the PyPI packages for the step.
586
560
 
587
- This decorator is useful if this step may hang indefinitely.
588
-
589
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
590
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
591
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
592
-
593
- Note that all the values specified in parameters are added together so if you specify
594
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
595
-
596
-
597
- Parameters
598
- ----------
599
- seconds : int, default 0
600
- Number of seconds to wait prior to timing out.
601
- minutes : int, default 0
602
- Number of minutes to wait prior to timing out.
603
- hours : int, default 0
604
- Number of hours to wait prior to timing out.
605
- """
606
- ...
607
-
608
- @typing.overload
609
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
610
- """
611
- Specifies environment variables to be set prior to the execution of a step.
612
-
613
-
614
- Parameters
615
- ----------
616
- vars : Dict[str, str], default {}
617
- Dictionary of environment variables to set.
618
- """
619
- ...
620
-
621
- @typing.overload
622
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
623
- ...
624
-
625
- @typing.overload
626
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
627
- ...
628
-
629
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
630
- """
631
- Specifies environment variables to be set prior to the execution of a step.
561
+ Information in this decorator will augment any
562
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
563
+ you can use `@pypi_base` to set packages required by all
564
+ steps and use `@pypi` to specify step-specific overrides.
632
565
 
633
566
 
634
567
  Parameters
635
568
  ----------
636
- vars : Dict[str, str], default {}
637
- Dictionary of environment variables to set.
569
+ packages : Dict[str, str], default: {}
570
+ Packages to use for this step. The key is the name of the package
571
+ and the value is the version to use.
572
+ python : str, optional, default: None
573
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
574
+ that the version used will correspond to the version of the Python interpreter used to start the run.
638
575
  """
639
576
  ...
640
577
 
@@ -702,19 +639,73 @@ def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_m
702
639
  """
703
640
  ...
704
641
 
705
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
642
+ @typing.overload
643
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
706
644
  """
707
- Specifies that this step should execute on DGX cloud.
645
+ A simple decorator that demonstrates using CardDecoratorInjector
646
+ to inject a card and render simple markdown content.
647
+ """
648
+ ...
649
+
650
+ @typing.overload
651
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
652
+ ...
653
+
654
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
655
+ """
656
+ A simple decorator that demonstrates using CardDecoratorInjector
657
+ to inject a card and render simple markdown content.
658
+ """
659
+ ...
660
+
661
+ @typing.overload
662
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
663
+ """
664
+ Specifies that the step will success under all circumstances.
665
+
666
+ The decorator will create an optional artifact, specified by `var`, which
667
+ contains the exception raised. You can use it to detect the presence
668
+ of errors, indicating that all happy-path artifacts produced by the step
669
+ are missing.
708
670
 
709
671
 
710
672
  Parameters
711
673
  ----------
712
- gpu : int
713
- Number of GPUs to use.
714
- gpu_type : str
715
- Type of Nvidia GPU to use.
716
- queue_timeout : int
717
- Time to keep the job in NVCF's queue.
674
+ var : str, optional, default None
675
+ Name of the artifact in which to store the caught exception.
676
+ If not specified, the exception is not stored.
677
+ print_exception : bool, default True
678
+ Determines whether or not the exception is printed to
679
+ stdout when caught.
680
+ """
681
+ ...
682
+
683
+ @typing.overload
684
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
685
+ ...
686
+
687
+ @typing.overload
688
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
689
+ ...
690
+
691
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
692
+ """
693
+ Specifies that the step will success under all circumstances.
694
+
695
+ The decorator will create an optional artifact, specified by `var`, which
696
+ contains the exception raised. You can use it to detect the presence
697
+ of errors, indicating that all happy-path artifacts produced by the step
698
+ are missing.
699
+
700
+
701
+ Parameters
702
+ ----------
703
+ var : str, optional, default None
704
+ Name of the artifact in which to store the caught exception.
705
+ If not specified, the exception is not stored.
706
+ print_exception : bool, default True
707
+ Determines whether or not the exception is printed to
708
+ stdout when caught.
718
709
  """
719
710
  ...
720
711
 
@@ -773,275 +764,132 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
773
764
  """
774
765
  ...
775
766
 
776
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
767
+ @typing.overload
768
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
777
769
  """
778
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
770
+ Enables loading / saving of models within a step.
779
771
 
780
- User code call
781
- --------------
782
- @ollama(
783
- models=[...],
784
- ...
785
- )
772
+ > Examples
773
+ - Saving Models
774
+ ```python
775
+ @model
776
+ @step
777
+ def train(self):
778
+ # current.model.save returns a dictionary reference to the model saved
779
+ self.my_model = current.model.save(
780
+ path_to_my_model,
781
+ label="my_model",
782
+ metadata={
783
+ "epochs": 10,
784
+ "batch-size": 32,
785
+ "learning-rate": 0.001,
786
+ }
787
+ )
788
+ self.next(self.test)
786
789
 
787
- Valid backend options
788
- ---------------------
789
- - 'local': Run as a separate process on the local task machine.
790
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
791
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
790
+ @model(load="my_model")
791
+ @step
792
+ def test(self):
793
+ # `current.model.loaded` returns a dictionary of the loaded models
794
+ # where the key is the name of the artifact and the value is the path to the model
795
+ print(os.listdir(current.model.loaded["my_model"]))
796
+ self.next(self.end)
797
+ ```
792
798
 
793
- Valid model options
794
- -------------------
795
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
799
+ - Loading models
800
+ ```python
801
+ @step
802
+ def train(self):
803
+ # current.model.load returns the path to the model loaded
804
+ checkpoint_path = current.model.load(
805
+ self.checkpoint_key,
806
+ )
807
+ model_path = current.model.load(
808
+ self.model,
809
+ )
810
+ self.next(self.test)
811
+ ```
796
812
 
797
813
 
798
814
  Parameters
799
815
  ----------
800
- models: list[str]
801
- List of Ollama containers running models in sidecars.
802
- backend: str
803
- Determines where and how to run the Ollama process.
804
- force_pull: bool
805
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
806
- cache_update_policy: str
807
- Cache update policy: "auto", "force", or "never".
808
- force_cache_update: bool
809
- Simple override for "force" cache update policy.
810
- debug: bool
811
- Whether to turn on verbose debugging logs.
812
- circuit_breaker_config: dict
813
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
814
- timeout_config: dict
815
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
816
- """
817
- ...
818
-
819
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
820
- """
821
- Specifies that this step should execute on DGX cloud.
822
-
816
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
817
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
818
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
819
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
820
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
821
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
823
822
 
824
- Parameters
825
- ----------
826
- gpu : int
827
- Number of GPUs to use.
828
- gpu_type : str
829
- Type of Nvidia GPU to use.
823
+ temp_dir_root : str, default: None
824
+ The root directory under which `current.model.loaded` will store loaded models
830
825
  """
831
826
  ...
832
827
 
833
828
  @typing.overload
834
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
835
- """
836
- Decorator prototype for all step decorators. This function gets specialized
837
- and imported for all decorators types by _import_plugin_decorators().
838
- """
829
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
839
830
  ...
840
831
 
841
832
  @typing.overload
842
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
843
- ...
844
-
845
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
846
- """
847
- Decorator prototype for all step decorators. This function gets specialized
848
- and imported for all decorators types by _import_plugin_decorators().
849
- """
833
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
850
834
  ...
851
835
 
852
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
836
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
853
837
  """
854
- Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
855
-
856
- Examples
857
- --------
838
+ Enables loading / saving of models within a step.
858
839
 
840
+ > Examples
841
+ - Saving Models
859
842
  ```python
860
- # **Usage: creating references to models from the Hugging Face Hub that may be loaded in downstream steps**
861
- @huggingface_hub
843
+ @model
862
844
  @step
863
- def pull_model_from_huggingface(self):
864
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
865
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
866
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
867
- # value of the function is a reference to the model in the backend storage.
868
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
869
-
870
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
871
- self.llama_model = current.huggingface_hub.snapshot_download(
872
- repo_id=self.model_id,
873
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
874
- )
875
- self.next(self.train)
876
-
877
- # **Usage: explicitly loading models at runtime from the Hugging Face Hub or from cache (from Metaflow's datastore)**
878
- @huggingface_hub
879
- @step
880
- def run_training(self):
881
- # Temporary directory (auto-cleaned on exit)
882
- with current.huggingface_hub.load(
883
- repo_id="google-bert/bert-base-uncased",
884
- allow_patterns=["*.bin"],
885
- ) as local_path:
886
- # Use files under local_path
887
- train_model(local_path)
888
- ...
889
-
890
- # **Usage: loading models directly from the Hugging Face Hub or from cache (from Metaflow's datastore)**
891
-
892
- @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
893
- @step
894
- def pull_model_from_huggingface(self):
895
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
896
-
897
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
898
- @step
899
- def finetune_model(self):
900
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
901
- # path_to_model will be /my-directory
902
-
845
+ def train(self):
846
+ # current.model.save returns a dictionary reference to the model saved
847
+ self.my_model = current.model.save(
848
+ path_to_my_model,
849
+ label="my_model",
850
+ metadata={
851
+ "epochs": 10,
852
+ "batch-size": 32,
853
+ "learning-rate": 0.001,
854
+ }
855
+ )
856
+ self.next(self.test)
903
857
 
904
- # Takes all the arguments passed to `snapshot_download`
905
- # except for `local_dir`
906
- @huggingface_hub(load=[
907
- {
908
- "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
909
- },
910
- {
911
- "repo_id": "myorg/mistral-lora",
912
- "repo_type": "model",
913
- },
914
- ])
858
+ @model(load="my_model")
915
859
  @step
916
- def finetune_model(self):
917
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
918
- # path_to_model will be /my-directory
860
+ def test(self):
861
+ # `current.model.loaded` returns a dictionary of the loaded models
862
+ # where the key is the name of the artifact and the value is the path to the model
863
+ print(os.listdir(current.model.loaded["my_model"]))
864
+ self.next(self.end)
919
865
  ```
920
866
 
921
-
922
- Parameters
923
- ----------
924
- temp_dir_root : str, optional
925
- The root directory that will hold the temporary directory where objects will be downloaded.
926
-
927
- cache_scope : str, optional
928
- The scope of the cache. Can be `checkpoint` / `flow` / `global`.
929
- - `checkpoint` (default): All repos are stored like objects saved by `@checkpoint`.
930
- i.e., the cached path is derived from the namespace, flow, step, and Metaflow foreach iteration.
931
- Any repo downloaded under this scope will only be retrieved from the cache when the step runs under the same namespace in the same flow (at the same foreach index).
932
-
933
- - `flow`: All repos are cached under the flow, regardless of namespace.
934
- i.e., the cached path is derived solely from the flow name.
935
- When to use this mode: (1) Multiple users are executing the same flow and want shared access to the repos cached by the decorator. (2) Multiple versions of a flow are deployed, all needing access to the same repos cached by the decorator.
936
-
937
- - `global`: All repos are cached under a globally static path.
938
- i.e., the base path of the cache is static and all repos are stored under it.
939
- When to use this mode:
940
- - All repos from the Hugging Face Hub need to be shared by users across all flow executions.
941
- - Each caching scope comes with its own trade-offs:
942
- - `checkpoint`:
943
- - Has explicit control over when caches are populated (controlled by the same flow that has the `@huggingface_hub` decorator) but ends up hitting the Hugging Face Hub more often if there are many users/namespaces/steps.
944
- - Since objects are written on a `namespace/flow/step` basis, the blast radius of a bad checkpoint is limited to a particular flow in a namespace.
945
- - `flow`:
946
- - Has less control over when caches are populated (can be written by any execution instance of a flow from any namespace) but results in more cache hits.
947
- - The blast radius of a bad checkpoint is limited to all runs of a particular flow.
948
- - It doesn't promote cache reuse across flows.
949
- - `global`:
950
- - Has no control over when caches are populated (can be written by any flow execution) but has the highest cache hit rate.
951
- - It promotes cache reuse across flows.
952
- - The blast radius of a bad checkpoint spans every flow that could be using a particular repo.
953
-
954
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
955
- The list of repos (models/datasets) to load.
956
-
957
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
958
-
959
- - If repo (model/dataset) is not found in the datastore:
960
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
961
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
962
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
963
-
964
- - If repo is found in the datastore:
965
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
966
- """
967
- ...
968
-
969
- @typing.overload
970
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
971
- """
972
- Specifies the resources needed when executing this step.
973
-
974
- Use `@resources` to specify the resource requirements
975
- independently of the specific compute layer (`@batch`, `@kubernetes`).
976
-
977
- You can choose the compute layer on the command line by executing e.g.
978
- ```
979
- python myflow.py run --with batch
980
- ```
981
- or
982
- ```
983
- python myflow.py run --with kubernetes
867
+ - Loading models
868
+ ```python
869
+ @step
870
+ def train(self):
871
+ # current.model.load returns the path to the model loaded
872
+ checkpoint_path = current.model.load(
873
+ self.checkpoint_key,
874
+ )
875
+ model_path = current.model.load(
876
+ self.model,
877
+ )
878
+ self.next(self.test)
984
879
  ```
985
- which executes the flow on the desired system using the
986
- requirements specified in `@resources`.
987
880
 
988
881
 
989
882
  Parameters
990
883
  ----------
991
- cpu : int, default 1
992
- Number of CPUs required for this step.
993
- gpu : int, optional, default None
994
- Number of GPUs required for this step.
995
- disk : int, optional, default None
996
- Disk size (in MB) required for this step. Only applies on Kubernetes.
997
- memory : int, default 4096
998
- Memory size (in MB) required for this step.
999
- shared_memory : int, optional, default None
1000
- The value for the size (in MiB) of the /dev/shm volume for this step.
1001
- This parameter maps to the `--shm-size` option in Docker.
1002
- """
1003
- ...
1004
-
1005
- @typing.overload
1006
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1007
- ...
1008
-
1009
- @typing.overload
1010
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1011
- ...
1012
-
1013
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1014
- """
1015
- Specifies the resources needed when executing this step.
1016
-
1017
- Use `@resources` to specify the resource requirements
1018
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1019
-
1020
- You can choose the compute layer on the command line by executing e.g.
1021
- ```
1022
- python myflow.py run --with batch
1023
- ```
1024
- or
1025
- ```
1026
- python myflow.py run --with kubernetes
1027
- ```
1028
- which executes the flow on the desired system using the
1029
- requirements specified in `@resources`.
1030
-
884
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
885
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
886
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
887
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
888
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
889
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1031
890
 
1032
- Parameters
1033
- ----------
1034
- cpu : int, default 1
1035
- Number of CPUs required for this step.
1036
- gpu : int, optional, default None
1037
- Number of GPUs required for this step.
1038
- disk : int, optional, default None
1039
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1040
- memory : int, default 4096
1041
- Memory size (in MB) required for this step.
1042
- shared_memory : int, optional, default None
1043
- The value for the size (in MiB) of the /dev/shm volume for this step.
1044
- This parameter maps to the `--shm-size` option in Docker.
891
+ temp_dir_root : str, default: None
892
+ The root directory under which `current.model.loaded` will store loaded models
1045
893
  """
1046
894
  ...
1047
895
 
@@ -1134,320 +982,187 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
1134
982
  """
1135
983
  ...
1136
984
 
1137
- @typing.overload
1138
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
985
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1139
986
  """
1140
- Specifies that the step will success under all circumstances.
987
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
1141
988
 
1142
- The decorator will create an optional artifact, specified by `var`, which
1143
- contains the exception raised. You can use it to detect the presence
1144
- of errors, indicating that all happy-path artifacts produced by the step
1145
- are missing.
989
+ User code call
990
+ --------------
991
+ @ollama(
992
+ models=[...],
993
+ ...
994
+ )
995
+
996
+ Valid backend options
997
+ ---------------------
998
+ - 'local': Run as a separate process on the local task machine.
999
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1000
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1001
+
1002
+ Valid model options
1003
+ -------------------
1004
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1146
1005
 
1147
1006
 
1148
1007
  Parameters
1149
1008
  ----------
1150
- var : str, optional, default None
1151
- Name of the artifact in which to store the caught exception.
1152
- If not specified, the exception is not stored.
1153
- print_exception : bool, default True
1154
- Determines whether or not the exception is printed to
1155
- stdout when caught.
1009
+ models: list[str]
1010
+ List of Ollama containers running models in sidecars.
1011
+ backend: str
1012
+ Determines where and how to run the Ollama process.
1013
+ force_pull: bool
1014
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1015
+ cache_update_policy: str
1016
+ Cache update policy: "auto", "force", or "never".
1017
+ force_cache_update: bool
1018
+ Simple override for "force" cache update policy.
1019
+ debug: bool
1020
+ Whether to turn on verbose debugging logs.
1021
+ circuit_breaker_config: dict
1022
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1023
+ timeout_config: dict
1024
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1156
1025
  """
1157
1026
  ...
1158
1027
 
1159
1028
  @typing.overload
1160
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1029
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1030
+ """
1031
+ Decorator prototype for all step decorators. This function gets specialized
1032
+ and imported for all decorators types by _import_plugin_decorators().
1033
+ """
1161
1034
  ...
1162
1035
 
1163
1036
  @typing.overload
1164
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1037
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1165
1038
  ...
1166
1039
 
1167
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1040
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1168
1041
  """
1169
- Specifies that the step will success under all circumstances.
1170
-
1171
- The decorator will create an optional artifact, specified by `var`, which
1172
- contains the exception raised. You can use it to detect the presence
1173
- of errors, indicating that all happy-path artifacts produced by the step
1174
- are missing.
1175
-
1176
-
1177
- Parameters
1178
- ----------
1179
- var : str, optional, default None
1180
- Name of the artifact in which to store the caught exception.
1181
- If not specified, the exception is not stored.
1182
- print_exception : bool, default True
1183
- Determines whether or not the exception is printed to
1184
- stdout when caught.
1042
+ Decorator prototype for all step decorators. This function gets specialized
1043
+ and imported for all decorators types by _import_plugin_decorators().
1185
1044
  """
1186
1045
  ...
1187
1046
 
1188
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1047
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1189
1048
  """
1190
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1191
-
1192
- User code call
1193
- --------------
1194
- @vllm(
1195
- model="...",
1196
- ...
1197
- )
1198
-
1199
- Valid backend options
1200
- ---------------------
1201
- - 'local': Run as a separate process on the local task machine.
1202
-
1203
- Valid model options
1204
- -------------------
1205
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1206
-
1207
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1208
- If you need multiple models, you must create multiple @vllm decorators.
1049
+ Specifies that this step should execute on DGX cloud.
1209
1050
 
1210
1051
 
1211
1052
  Parameters
1212
1053
  ----------
1213
- model: str
1214
- HuggingFace model identifier to be served by vLLM.
1215
- backend: str
1216
- Determines where and how to run the vLLM process.
1217
- openai_api_server: bool
1218
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1219
- Default is False (uses native engine).
1220
- Set to True for backward compatibility with existing code.
1221
- debug: bool
1222
- Whether to turn on verbose debugging logs.
1223
- card_refresh_interval: int
1224
- Interval in seconds for refreshing the vLLM status card.
1225
- Only used when openai_api_server=True.
1226
- max_retries: int
1227
- Maximum number of retries checking for vLLM server startup.
1228
- Only used when openai_api_server=True.
1229
- retry_alert_frequency: int
1230
- Frequency of alert logs for vLLM server startup retries.
1231
- Only used when openai_api_server=True.
1232
- engine_args : dict
1233
- Additional keyword arguments to pass to the vLLM engine.
1234
- For example, `tensor_parallel_size=2`.
1054
+ gpu : int
1055
+ Number of GPUs to use.
1056
+ gpu_type : str
1057
+ Type of Nvidia GPU to use.
1058
+ queue_timeout : int
1059
+ Time to keep the job in NVCF's queue.
1235
1060
  """
1236
1061
  ...
1237
1062
 
1238
- @typing.overload
1239
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1063
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1240
1064
  """
1241
- Enables loading / saving of models within a step.
1242
-
1243
- > Examples
1244
- - Saving Models
1245
- ```python
1246
- @model
1247
- @step
1248
- def train(self):
1249
- # current.model.save returns a dictionary reference to the model saved
1250
- self.my_model = current.model.save(
1251
- path_to_my_model,
1252
- label="my_model",
1253
- metadata={
1254
- "epochs": 10,
1255
- "batch-size": 32,
1256
- "learning-rate": 0.001,
1257
- }
1258
- )
1259
- self.next(self.test)
1260
-
1261
- @model(load="my_model")
1262
- @step
1263
- def test(self):
1264
- # `current.model.loaded` returns a dictionary of the loaded models
1265
- # where the key is the name of the artifact and the value is the path to the model
1266
- print(os.listdir(current.model.loaded["my_model"]))
1267
- self.next(self.end)
1268
- ```
1269
-
1270
- - Loading models
1271
- ```python
1272
- @step
1273
- def train(self):
1274
- # current.model.load returns the path to the model loaded
1275
- checkpoint_path = current.model.load(
1276
- self.checkpoint_key,
1277
- )
1278
- model_path = current.model.load(
1279
- self.model,
1280
- )
1281
- self.next(self.test)
1282
- ```
1065
+ Specifies that this step should execute on DGX cloud.
1283
1066
 
1284
1067
 
1285
1068
  Parameters
1286
1069
  ----------
1287
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1288
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1289
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1290
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1291
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1292
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1293
-
1294
- temp_dir_root : str, default: None
1295
- The root directory under which `current.model.loaded` will store loaded models
1070
+ gpu : int
1071
+ Number of GPUs to use.
1072
+ gpu_type : str
1073
+ Type of Nvidia GPU to use.
1296
1074
  """
1297
1075
  ...
1298
1076
 
1299
1077
  @typing.overload
1300
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1301
- ...
1302
-
1303
- @typing.overload
1304
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1305
- ...
1306
-
1307
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1078
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1308
1079
  """
1309
- Enables loading / saving of models within a step.
1310
-
1311
- > Examples
1312
- - Saving Models
1313
- ```python
1314
- @model
1315
- @step
1316
- def train(self):
1317
- # current.model.save returns a dictionary reference to the model saved
1318
- self.my_model = current.model.save(
1319
- path_to_my_model,
1320
- label="my_model",
1321
- metadata={
1322
- "epochs": 10,
1323
- "batch-size": 32,
1324
- "learning-rate": 0.001,
1325
- }
1326
- )
1327
- self.next(self.test)
1328
-
1329
- @model(load="my_model")
1330
- @step
1331
- def test(self):
1332
- # `current.model.loaded` returns a dictionary of the loaded models
1333
- # where the key is the name of the artifact and the value is the path to the model
1334
- print(os.listdir(current.model.loaded["my_model"]))
1335
- self.next(self.end)
1336
- ```
1337
-
1338
- - Loading models
1339
- ```python
1340
- @step
1341
- def train(self):
1342
- # current.model.load returns the path to the model loaded
1343
- checkpoint_path = current.model.load(
1344
- self.checkpoint_key,
1345
- )
1346
- model_path = current.model.load(
1347
- self.model,
1348
- )
1349
- self.next(self.test)
1350
- ```
1080
+ Specifies environment variables to be set prior to the execution of a step.
1351
1081
 
1352
1082
 
1353
1083
  Parameters
1354
1084
  ----------
1355
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1356
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1357
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1358
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1359
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1360
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1361
-
1362
- temp_dir_root : str, default: None
1363
- The root directory under which `current.model.loaded` will store loaded models
1085
+ vars : Dict[str, str], default {}
1086
+ Dictionary of environment variables to set.
1364
1087
  """
1365
1088
  ...
1366
1089
 
1367
1090
  @typing.overload
1368
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1369
- """
1370
- Decorator prototype for all step decorators. This function gets specialized
1371
- and imported for all decorators types by _import_plugin_decorators().
1372
- """
1091
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1373
1092
  ...
1374
1093
 
1375
1094
  @typing.overload
1376
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1095
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1377
1096
  ...
1378
1097
 
1379
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1098
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1380
1099
  """
1381
- Decorator prototype for all step decorators. This function gets specialized
1382
- and imported for all decorators types by _import_plugin_decorators().
1100
+ Specifies environment variables to be set prior to the execution of a step.
1101
+
1102
+
1103
+ Parameters
1104
+ ----------
1105
+ vars : Dict[str, str], default {}
1106
+ Dictionary of environment variables to set.
1383
1107
  """
1384
1108
  ...
1385
1109
 
1386
1110
  @typing.overload
1387
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1111
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1388
1112
  """
1389
- Creates a human-readable report, a Metaflow Card, after this step completes.
1113
+ Specifies a timeout for your step.
1390
1114
 
1391
- Note that you may add multiple `@card` decorators in a step with different parameters.
1115
+ This decorator is useful if this step may hang indefinitely.
1116
+
1117
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1118
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1119
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1120
+
1121
+ Note that all the values specified in parameters are added together so if you specify
1122
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1392
1123
 
1393
1124
 
1394
1125
  Parameters
1395
1126
  ----------
1396
- type : str, default 'default'
1397
- Card type.
1398
- id : str, optional, default None
1399
- If multiple cards are present, use this id to identify this card.
1400
- options : Dict[str, Any], default {}
1401
- Options passed to the card. The contents depend on the card type.
1402
- timeout : int, default 45
1403
- Interrupt reporting if it takes more than this many seconds.
1127
+ seconds : int, default 0
1128
+ Number of seconds to wait prior to timing out.
1129
+ minutes : int, default 0
1130
+ Number of minutes to wait prior to timing out.
1131
+ hours : int, default 0
1132
+ Number of hours to wait prior to timing out.
1404
1133
  """
1405
1134
  ...
1406
1135
 
1407
1136
  @typing.overload
1408
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1137
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1409
1138
  ...
1410
1139
 
1411
1140
  @typing.overload
1412
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1141
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1413
1142
  ...
1414
1143
 
1415
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1144
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1416
1145
  """
1417
- Creates a human-readable report, a Metaflow Card, after this step completes.
1146
+ Specifies a timeout for your step.
1418
1147
 
1419
- Note that you may add multiple `@card` decorators in a step with different parameters.
1148
+ This decorator is useful if this step may hang indefinitely.
1149
+
1150
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1151
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1152
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1153
+
1154
+ Note that all the values specified in parameters are added together so if you specify
1155
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1420
1156
 
1421
1157
 
1422
1158
  Parameters
1423
1159
  ----------
1424
- type : str, default 'default'
1425
- Card type.
1426
- id : str, optional, default None
1427
- If multiple cards are present, use this id to identify this card.
1428
- options : Dict[str, Any], default {}
1429
- Options passed to the card. The contents depend on the card type.
1430
- timeout : int, default 45
1431
- Interrupt reporting if it takes more than this many seconds.
1432
- """
1433
- ...
1434
-
1435
- @typing.overload
1436
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1437
- """
1438
- A simple decorator that demonstrates using CardDecoratorInjector
1439
- to inject a card and render simple markdown content.
1440
- """
1441
- ...
1442
-
1443
- @typing.overload
1444
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1445
- ...
1446
-
1447
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1448
- """
1449
- A simple decorator that demonstrates using CardDecoratorInjector
1450
- to inject a card and render simple markdown content.
1160
+ seconds : int, default 0
1161
+ Number of seconds to wait prior to timing out.
1162
+ minutes : int, default 0
1163
+ Number of minutes to wait prior to timing out.
1164
+ hours : int, default 0
1165
+ Number of hours to wait prior to timing out.
1451
1166
  """
1452
1167
  ...
1453
1168
 
@@ -1511,217 +1226,551 @@ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typin
1511
1226
  ...
1512
1227
 
1513
1228
  @typing.overload
1514
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1229
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1515
1230
  """
1516
- Specifies the flow(s) that this flow depends on.
1231
+ Specifies secrets to be retrieved and injected as environment variables prior to
1232
+ the execution of a step.
1517
1233
 
1518
- ```
1519
- @trigger_on_finish(flow='FooFlow')
1520
- ```
1521
- or
1522
- ```
1523
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1524
- ```
1525
- This decorator respects the @project decorator and triggers the flow
1526
- when upstream runs within the same namespace complete successfully
1527
1234
 
1528
- Additionally, you can specify project aware upstream flow dependencies
1529
- by specifying the fully qualified project_flow_name.
1235
+ Parameters
1236
+ ----------
1237
+ sources : List[Union[str, Dict[str, Any]]], default: []
1238
+ List of secret specs, defining how the secrets are to be retrieved
1239
+ role : str, optional, default: None
1240
+ Role to use for fetching secrets
1241
+ """
1242
+ ...
1243
+
1244
+ @typing.overload
1245
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1246
+ ...
1247
+
1248
+ @typing.overload
1249
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1250
+ ...
1251
+
1252
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1253
+ """
1254
+ Specifies secrets to be retrieved and injected as environment variables prior to
1255
+ the execution of a step.
1256
+
1257
+
1258
+ Parameters
1259
+ ----------
1260
+ sources : List[Union[str, Dict[str, Any]]], default: []
1261
+ List of secret specs, defining how the secrets are to be retrieved
1262
+ role : str, optional, default: None
1263
+ Role to use for fetching secrets
1264
+ """
1265
+ ...
1266
+
1267
+ @typing.overload
1268
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1269
+ """
1270
+ Specifies the resources needed when executing this step.
1271
+
1272
+ Use `@resources` to specify the resource requirements
1273
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1274
+
1275
+ You can choose the compute layer on the command line by executing e.g.
1530
1276
  ```
1531
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1277
+ python myflow.py run --with batch
1532
1278
  ```
1533
1279
  or
1534
1280
  ```
1535
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1536
- ```
1537
-
1538
- You can also specify just the project or project branch (other values will be
1539
- inferred from the current project or project branch):
1540
- ```
1541
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1281
+ python myflow.py run --with kubernetes
1542
1282
  ```
1543
-
1544
- Note that `branch` is typically one of:
1545
- - `prod`
1546
- - `user.bob`
1547
- - `test.my_experiment`
1548
- - `prod.staging`
1283
+ which executes the flow on the desired system using the
1284
+ requirements specified in `@resources`.
1549
1285
 
1550
1286
 
1551
1287
  Parameters
1552
1288
  ----------
1553
- flow : Union[str, Dict[str, str]], optional, default None
1554
- Upstream flow dependency for this flow.
1555
- flows : List[Union[str, Dict[str, str]]], default []
1556
- Upstream flow dependencies for this flow.
1557
- options : Dict[str, Any], default {}
1558
- Backend-specific configuration for tuning eventing behavior.
1289
+ cpu : int, default 1
1290
+ Number of CPUs required for this step.
1291
+ gpu : int, optional, default None
1292
+ Number of GPUs required for this step.
1293
+ disk : int, optional, default None
1294
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1295
+ memory : int, default 4096
1296
+ Memory size (in MB) required for this step.
1297
+ shared_memory : int, optional, default None
1298
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1299
+ This parameter maps to the `--shm-size` option in Docker.
1559
1300
  """
1560
1301
  ...
1561
1302
 
1562
1303
  @typing.overload
1563
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1304
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1564
1305
  ...
1565
1306
 
1566
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1307
+ @typing.overload
1308
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1309
+ ...
1310
+
1311
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1567
1312
  """
1568
- Specifies the flow(s) that this flow depends on.
1313
+ Specifies the resources needed when executing this step.
1569
1314
 
1570
- ```
1571
- @trigger_on_finish(flow='FooFlow')
1572
- ```
1573
- or
1574
- ```
1575
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1576
- ```
1577
- This decorator respects the @project decorator and triggers the flow
1578
- when upstream runs within the same namespace complete successfully
1315
+ Use `@resources` to specify the resource requirements
1316
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1579
1317
 
1580
- Additionally, you can specify project aware upstream flow dependencies
1581
- by specifying the fully qualified project_flow_name.
1318
+ You can choose the compute layer on the command line by executing e.g.
1582
1319
  ```
1583
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1320
+ python myflow.py run --with batch
1584
1321
  ```
1585
1322
  or
1586
1323
  ```
1587
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1588
- ```
1589
-
1590
- You can also specify just the project or project branch (other values will be
1591
- inferred from the current project or project branch):
1592
- ```
1593
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1324
+ python myflow.py run --with kubernetes
1594
1325
  ```
1595
-
1596
- Note that `branch` is typically one of:
1597
- - `prod`
1598
- - `user.bob`
1599
- - `test.my_experiment`
1600
- - `prod.staging`
1326
+ which executes the flow on the desired system using the
1327
+ requirements specified in `@resources`.
1601
1328
 
1602
1329
 
1603
1330
  Parameters
1604
1331
  ----------
1605
- flow : Union[str, Dict[str, str]], optional, default None
1606
- Upstream flow dependency for this flow.
1607
- flows : List[Union[str, Dict[str, str]]], default []
1608
- Upstream flow dependencies for this flow.
1609
- options : Dict[str, Any], default {}
1610
- Backend-specific configuration for tuning eventing behavior.
1332
+ cpu : int, default 1
1333
+ Number of CPUs required for this step.
1334
+ gpu : int, optional, default None
1335
+ Number of GPUs required for this step.
1336
+ disk : int, optional, default None
1337
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1338
+ memory : int, default 4096
1339
+ Memory size (in MB) required for this step.
1340
+ shared_memory : int, optional, default None
1341
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1342
+ This parameter maps to the `--shm-size` option in Docker.
1611
1343
  """
1612
1344
  ...
1613
1345
 
1614
- def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1346
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1615
1347
  """
1616
- Allows setting external datastores to save data for the
1617
- `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1618
-
1619
- This decorator is useful when users wish to save data to a different datastore
1620
- than what is configured in Metaflow. This can be for variety of reasons:
1348
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
1621
1349
 
1622
- 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
1623
- 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1624
- - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1625
- 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1626
- - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1350
+ User code call
1351
+ --------------
1352
+ @vllm(
1353
+ model="...",
1354
+ ...
1355
+ )
1627
1356
 
1628
- Usage:
1629
- ----------
1357
+ Valid backend options
1358
+ ---------------------
1359
+ - 'local': Run as a separate process on the local task machine.
1630
1360
 
1631
- - Using a custom IAM role to access the datastore.
1361
+ Valid model options
1362
+ -------------------
1363
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1632
1364
 
1633
- ```python
1634
- @with_artifact_store(
1635
- type="s3",
1636
- config=lambda: {
1637
- "root": "s3://my-bucket-foo/path/to/root",
1638
- "role_arn": ROLE,
1639
- },
1640
- )
1641
- class MyFlow(FlowSpec):
1365
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1366
+ If you need multiple models, you must create multiple @vllm decorators.
1642
1367
 
1643
- @checkpoint
1644
- @step
1645
- def start(self):
1646
- with open("my_file.txt", "w") as f:
1647
- f.write("Hello, World!")
1648
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1649
- self.next(self.end)
1650
1368
 
1651
- ```
1369
+ Parameters
1370
+ ----------
1371
+ model: str
1372
+ HuggingFace model identifier to be served by vLLM.
1373
+ backend: str
1374
+ Determines where and how to run the vLLM process.
1375
+ openai_api_server: bool
1376
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1377
+ Default is False (uses native engine).
1378
+ Set to True for backward compatibility with existing code.
1379
+ debug: bool
1380
+ Whether to turn on verbose debugging logs.
1381
+ card_refresh_interval: int
1382
+ Interval in seconds for refreshing the vLLM status card.
1383
+ Only used when openai_api_server=True.
1384
+ max_retries: int
1385
+ Maximum number of retries checking for vLLM server startup.
1386
+ Only used when openai_api_server=True.
1387
+ retry_alert_frequency: int
1388
+ Frequency of alert logs for vLLM server startup retries.
1389
+ Only used when openai_api_server=True.
1390
+ engine_args : dict
1391
+ Additional keyword arguments to pass to the vLLM engine.
1392
+ For example, `tensor_parallel_size=2`.
1393
+ """
1394
+ ...
1395
+
1396
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1397
+ """
1398
+ Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
1652
1399
 
1653
- - Using credentials to access the s3-compatible datastore.
1400
+ Examples
1401
+ --------
1654
1402
 
1655
- ```python
1656
- @with_artifact_store(
1657
- type="s3",
1658
- config=lambda: {
1659
- "root": "s3://my-bucket-foo/path/to/root",
1660
- "client_params": {
1661
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1662
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1663
- },
1664
- },
1403
+ ```python
1404
+ # **Usage: creating references to models from the Hugging Face Hub that may be loaded in downstream steps**
1405
+ @huggingface_hub
1406
+ @step
1407
+ def pull_model_from_huggingface(self):
1408
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
1409
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
1410
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
1411
+ # value of the function is a reference to the model in the backend storage.
1412
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
1413
+
1414
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
1415
+ self.llama_model = current.huggingface_hub.snapshot_download(
1416
+ repo_id=self.model_id,
1417
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
1665
1418
  )
1666
- class MyFlow(FlowSpec):
1419
+ self.next(self.train)
1420
+
1421
+ # **Usage: explicitly loading models at runtime from the Hugging Face Hub or from cache (from Metaflow's datastore)**
1422
+ @huggingface_hub
1423
+ @step
1424
+ def run_training(self):
1425
+ # Temporary directory (auto-cleaned on exit)
1426
+ with current.huggingface_hub.load(
1427
+ repo_id="google-bert/bert-base-uncased",
1428
+ allow_patterns=["*.bin"],
1429
+ ) as local_path:
1430
+ # Use files under local_path
1431
+ train_model(local_path)
1432
+ ...
1433
+
1434
+ # **Usage: loading models directly from the Hugging Face Hub or from cache (from Metaflow's datastore)**
1435
+
1436
+ @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
1437
+ @step
1438
+ def pull_model_from_huggingface(self):
1439
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1440
+
1441
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
1442
+ @step
1443
+ def finetune_model(self):
1444
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1445
+ # path_to_model will be /my-directory
1446
+
1447
+
1448
+ # Takes all the arguments passed to `snapshot_download`
1449
+ # except for `local_dir`
1450
+ @huggingface_hub(load=[
1451
+ {
1452
+ "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
1453
+ },
1454
+ {
1455
+ "repo_id": "myorg/mistral-lora",
1456
+ "repo_type": "model",
1457
+ },
1458
+ ])
1459
+ @step
1460
+ def finetune_model(self):
1461
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1462
+ # path_to_model will be /my-directory
1463
+ ```
1464
+
1465
+
1466
+ Parameters
1467
+ ----------
1468
+ temp_dir_root : str, optional
1469
+ The root directory that will hold the temporary directory where objects will be downloaded.
1470
+
1471
+ cache_scope : str, optional
1472
+ The scope of the cache. Can be `checkpoint` / `flow` / `global`.
1473
+ - `checkpoint` (default): All repos are stored like objects saved by `@checkpoint`.
1474
+ i.e., the cached path is derived from the namespace, flow, step, and Metaflow foreach iteration.
1475
+ Any repo downloaded under this scope will only be retrieved from the cache when the step runs under the same namespace in the same flow (at the same foreach index).
1476
+
1477
+ - `flow`: All repos are cached under the flow, regardless of namespace.
1478
+ i.e., the cached path is derived solely from the flow name.
1479
+ When to use this mode: (1) Multiple users are executing the same flow and want shared access to the repos cached by the decorator. (2) Multiple versions of a flow are deployed, all needing access to the same repos cached by the decorator.
1480
+
1481
+ - `global`: All repos are cached under a globally static path.
1482
+ i.e., the base path of the cache is static and all repos are stored under it.
1483
+ When to use this mode:
1484
+ - All repos from the Hugging Face Hub need to be shared by users across all flow executions.
1485
+ - Each caching scope comes with its own trade-offs:
1486
+ - `checkpoint`:
1487
+ - Has explicit control over when caches are populated (controlled by the same flow that has the `@huggingface_hub` decorator) but ends up hitting the Hugging Face Hub more often if there are many users/namespaces/steps.
1488
+ - Since objects are written on a `namespace/flow/step` basis, the blast radius of a bad checkpoint is limited to a particular flow in a namespace.
1489
+ - `flow`:
1490
+ - Has less control over when caches are populated (can be written by any execution instance of a flow from any namespace) but results in more cache hits.
1491
+ - The blast radius of a bad checkpoint is limited to all runs of a particular flow.
1492
+ - It doesn't promote cache reuse across flows.
1493
+ - `global`:
1494
+ - Has no control over when caches are populated (can be written by any flow execution) but has the highest cache hit rate.
1495
+ - It promotes cache reuse across flows.
1496
+ - The blast radius of a bad checkpoint spans every flow that could be using a particular repo.
1497
+
1498
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
1499
+ The list of repos (models/datasets) to load.
1500
+
1501
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
1502
+
1503
+ - If repo (model/dataset) is not found in the datastore:
1504
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
1505
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
1506
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
1507
+
1508
+ - If repo is found in the datastore:
1509
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
1510
+ """
1511
+ ...
1512
+
1513
+ @typing.overload
1514
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1515
+ """
1516
+ Specifies the times when the flow should be run when running on a
1517
+ production scheduler.
1518
+
1519
+
1520
+ Parameters
1521
+ ----------
1522
+ hourly : bool, default False
1523
+ Run the workflow hourly.
1524
+ daily : bool, default True
1525
+ Run the workflow daily.
1526
+ weekly : bool, default False
1527
+ Run the workflow weekly.
1528
+ cron : str, optional, default None
1529
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1530
+ specified by this expression.
1531
+ timezone : str, optional, default None
1532
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1533
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1534
+ """
1535
+ ...
1536
+
1537
+ @typing.overload
1538
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1539
+ ...
1540
+
1541
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1542
+ """
1543
+ Specifies the times when the flow should be run when running on a
1544
+ production scheduler.
1545
+
1546
+
1547
+ Parameters
1548
+ ----------
1549
+ hourly : bool, default False
1550
+ Run the workflow hourly.
1551
+ daily : bool, default True
1552
+ Run the workflow daily.
1553
+ weekly : bool, default False
1554
+ Run the workflow weekly.
1555
+ cron : str, optional, default None
1556
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1557
+ specified by this expression.
1558
+ timezone : str, optional, default None
1559
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1560
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1561
+ """
1562
+ ...
1563
+
1564
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1565
+ """
1566
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1567
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1568
+
1569
+
1570
+ Parameters
1571
+ ----------
1572
+ timeout : int
1573
+ Time, in seconds before the task times out and fails. (Default: 3600)
1574
+ poke_interval : int
1575
+ Time in seconds that the job should wait in between each try. (Default: 60)
1576
+ mode : str
1577
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1578
+ exponential_backoff : bool
1579
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1580
+ pool : str
1581
+ the slot pool this task should run in,
1582
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1583
+ soft_fail : bool
1584
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1585
+ name : str
1586
+ Name of the sensor on Airflow
1587
+ description : str
1588
+ Description of sensor in the Airflow UI
1589
+ external_dag_id : str
1590
+ The dag_id that contains the task you want to wait for.
1591
+ external_task_ids : List[str]
1592
+ The list of task_ids that you want to wait for.
1593
+ If None (default value) the sensor waits for the DAG. (Default: None)
1594
+ allowed_states : List[str]
1595
+ Iterable of allowed states, (Default: ['success'])
1596
+ failed_states : List[str]
1597
+ Iterable of failed or dis-allowed states. (Default: None)
1598
+ execution_delta : datetime.timedelta
1599
+ time difference with the previous execution to look at,
1600
+ the default is the same logical date as the current task or DAG. (Default: None)
1601
+ check_existence: bool
1602
+ Set to True to check if the external task exists or check if
1603
+ the DAG to wait for exists. (Default: True)
1604
+ """
1605
+ ...
1606
+
1607
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1608
+ """
1609
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1610
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1611
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1612
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1613
+ starts only after all sensors finish.
1614
+
1615
+
1616
+ Parameters
1617
+ ----------
1618
+ timeout : int
1619
+ Time, in seconds before the task times out and fails. (Default: 3600)
1620
+ poke_interval : int
1621
+ Time in seconds that the job should wait in between each try. (Default: 60)
1622
+ mode : str
1623
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1624
+ exponential_backoff : bool
1625
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1626
+ pool : str
1627
+ the slot pool this task should run in,
1628
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1629
+ soft_fail : bool
1630
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1631
+ name : str
1632
+ Name of the sensor on Airflow
1633
+ description : str
1634
+ Description of sensor in the Airflow UI
1635
+ bucket_key : Union[str, List[str]]
1636
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1637
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1638
+ bucket_name : str
1639
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1640
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1641
+ wildcard_match : bool
1642
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1643
+ aws_conn_id : str
1644
+ a reference to the s3 connection on Airflow. (Default: None)
1645
+ verify : bool
1646
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1647
+ """
1648
+ ...
1649
+
1650
+ @typing.overload
1651
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1652
+ """
1653
+ Specifies the PyPI packages for all steps of the flow.
1654
+
1655
+ Use `@pypi_base` to set common packages required by all
1656
+ steps and use `@pypi` to specify step-specific overrides.
1657
+
1658
+ Parameters
1659
+ ----------
1660
+ packages : Dict[str, str], default: {}
1661
+ Packages to use for this flow. The key is the name of the package
1662
+ and the value is the version to use.
1663
+ python : str, optional, default: None
1664
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1665
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1666
+ """
1667
+ ...
1668
+
1669
+ @typing.overload
1670
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1671
+ ...
1672
+
1673
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1674
+ """
1675
+ Specifies the PyPI packages for all steps of the flow.
1676
+
1677
+ Use `@pypi_base` to set common packages required by all
1678
+ steps and use `@pypi` to specify step-specific overrides.
1679
+
1680
+ Parameters
1681
+ ----------
1682
+ packages : Dict[str, str], default: {}
1683
+ Packages to use for this flow. The key is the name of the package
1684
+ and the value is the version to use.
1685
+ python : str, optional, default: None
1686
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1687
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1688
+ """
1689
+ ...
1690
+
1691
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1692
+ """
1693
+ Specifies what flows belong to the same project.
1694
+
1695
+ A project-specific namespace is created for all flows that
1696
+ use the same `@project(name)`.
1667
1697
 
1668
- @checkpoint
1669
- @step
1670
- def start(self):
1671
- with open("my_file.txt", "w") as f:
1672
- f.write("Hello, World!")
1673
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1674
- self.next(self.end)
1675
1698
 
1676
- ```
1699
+ Parameters
1700
+ ----------
1701
+ name : str
1702
+ Project name. Make sure that the name is unique amongst all
1703
+ projects that use the same production scheduler. The name may
1704
+ contain only lowercase alphanumeric characters and underscores.
1677
1705
 
1678
- - Accessing objects stored in external datastores after task execution.
1706
+ branch : Optional[str], default None
1707
+ The branch to use. If not specified, the branch is set to
1708
+ `user.<username>` unless `production` is set to `True`. This can
1709
+ also be set on the command line using `--branch` as a top-level option.
1710
+ It is an error to specify `branch` in the decorator and on the command line.
1679
1711
 
1680
- ```python
1681
- run = Run("CheckpointsTestsFlow/8992")
1682
- with artifact_store_from(run=run, config={
1683
- "client_params": {
1684
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1685
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1686
- },
1687
- }):
1688
- with Checkpoint() as cp:
1689
- latest = cp.list(
1690
- task=run["start"].task
1691
- )[0]
1692
- print(latest)
1693
- cp.load(
1694
- latest,
1695
- "test-checkpoints"
1696
- )
1712
+ production : bool, default False
1713
+ Whether or not the branch is the production branch. This can also be set on the
1714
+ command line using `--production` as a top-level option. It is an error to specify
1715
+ `production` in the decorator and on the command line.
1716
+ The project branch name will be:
1717
+ - if `branch` is specified:
1718
+ - if `production` is True: `prod.<branch>`
1719
+ - if `production` is False: `test.<branch>`
1720
+ - if `branch` is not specified:
1721
+ - if `production` is True: `prod`
1722
+ - if `production` is False: `user.<username>`
1723
+ """
1724
+ ...
1725
+
1726
+ @typing.overload
1727
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1728
+ """
1729
+ Specifies the Conda environment for all steps of the flow.
1697
1730
 
1698
- task = Task("TorchTuneFlow/8484/train/53673")
1699
- with artifact_store_from(run=run, config={
1700
- "client_params": {
1701
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1702
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1703
- },
1704
- }):
1705
- load_model(
1706
- task.data.model_ref,
1707
- "test-models"
1708
- )
1709
- ```
1710
- Parameters:
1731
+ Use `@conda_base` to set common libraries required by all
1732
+ steps and use `@conda` to specify step-specific additions.
1733
+
1734
+
1735
+ Parameters
1711
1736
  ----------
1737
+ packages : Dict[str, str], default {}
1738
+ Packages to use for this flow. The key is the name of the package
1739
+ and the value is the version to use.
1740
+ libraries : Dict[str, str], default {}
1741
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1742
+ python : str, optional, default None
1743
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1744
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1745
+ disabled : bool, default False
1746
+ If set to True, disables Conda.
1747
+ """
1748
+ ...
1749
+
1750
+ @typing.overload
1751
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1752
+ ...
1753
+
1754
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1755
+ """
1756
+ Specifies the Conda environment for all steps of the flow.
1712
1757
 
1713
- type: str
1714
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1758
+ Use `@conda_base` to set common libraries required by all
1759
+ steps and use `@conda` to specify step-specific additions.
1715
1760
 
1716
- config: dict or Callable
1717
- Dictionary of configuration options for the datastore. The following keys are required:
1718
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1719
- - example: 's3://bucket-name/path/to/root'
1720
- - example: 'gs://bucket-name/path/to/root'
1721
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1722
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1723
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1724
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1761
+
1762
+ Parameters
1763
+ ----------
1764
+ packages : Dict[str, str], default {}
1765
+ Packages to use for this flow. The key is the name of the package
1766
+ and the value is the version to use.
1767
+ libraries : Dict[str, str], default {}
1768
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1769
+ python : str, optional, default None
1770
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1771
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1772
+ disabled : bool, default False
1773
+ If set to True, disables Conda.
1725
1774
  """
1726
1775
  ...
1727
1776
 
@@ -1818,267 +1867,218 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1818
1867
  """
1819
1868
  ...
1820
1869
 
1821
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1870
+ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1822
1871
  """
1823
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1824
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1825
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1826
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1827
- starts only after all sensors finish.
1872
+ Allows setting external datastores to save data for the
1873
+ `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1828
1874
 
1875
+ This decorator is useful when users wish to save data to a different datastore
1876
+ than what is configured in Metaflow. This can be for variety of reasons:
1829
1877
 
1830
- Parameters
1878
+ 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
1879
+ 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1880
+ - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1881
+ 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1882
+ - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1883
+
1884
+ Usage:
1831
1885
  ----------
1832
- timeout : int
1833
- Time, in seconds before the task times out and fails. (Default: 3600)
1834
- poke_interval : int
1835
- Time in seconds that the job should wait in between each try. (Default: 60)
1836
- mode : str
1837
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1838
- exponential_backoff : bool
1839
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1840
- pool : str
1841
- the slot pool this task should run in,
1842
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1843
- soft_fail : bool
1844
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1845
- name : str
1846
- Name of the sensor on Airflow
1847
- description : str
1848
- Description of sensor in the Airflow UI
1849
- bucket_key : Union[str, List[str]]
1850
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1851
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1852
- bucket_name : str
1853
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1854
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1855
- wildcard_match : bool
1856
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1857
- aws_conn_id : str
1858
- a reference to the s3 connection on Airflow. (Default: None)
1859
- verify : bool
1860
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1861
- """
1862
- ...
1863
-
1864
- @typing.overload
1865
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1866
- """
1867
- Specifies the times when the flow should be run when running on a
1868
- production scheduler.
1869
1886
 
1887
+ - Using a custom IAM role to access the datastore.
1870
1888
 
1871
- Parameters
1872
- ----------
1873
- hourly : bool, default False
1874
- Run the workflow hourly.
1875
- daily : bool, default True
1876
- Run the workflow daily.
1877
- weekly : bool, default False
1878
- Run the workflow weekly.
1879
- cron : str, optional, default None
1880
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1881
- specified by this expression.
1882
- timezone : str, optional, default None
1883
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1884
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1885
- """
1886
- ...
1887
-
1888
- @typing.overload
1889
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1890
- ...
1891
-
1892
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1893
- """
1894
- Specifies the times when the flow should be run when running on a
1895
- production scheduler.
1889
+ ```python
1890
+ @with_artifact_store(
1891
+ type="s3",
1892
+ config=lambda: {
1893
+ "root": "s3://my-bucket-foo/path/to/root",
1894
+ "role_arn": ROLE,
1895
+ },
1896
+ )
1897
+ class MyFlow(FlowSpec):
1896
1898
 
1899
+ @checkpoint
1900
+ @step
1901
+ def start(self):
1902
+ with open("my_file.txt", "w") as f:
1903
+ f.write("Hello, World!")
1904
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1905
+ self.next(self.end)
1897
1906
 
1898
- Parameters
1899
- ----------
1900
- hourly : bool, default False
1901
- Run the workflow hourly.
1902
- daily : bool, default True
1903
- Run the workflow daily.
1904
- weekly : bool, default False
1905
- Run the workflow weekly.
1906
- cron : str, optional, default None
1907
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1908
- specified by this expression.
1909
- timezone : str, optional, default None
1910
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1911
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1912
- """
1913
- ...
1914
-
1915
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1916
- """
1917
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1918
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1907
+ ```
1908
+
1909
+ - Using credentials to access the s3-compatible datastore.
1910
+
1911
+ ```python
1912
+ @with_artifact_store(
1913
+ type="s3",
1914
+ config=lambda: {
1915
+ "root": "s3://my-bucket-foo/path/to/root",
1916
+ "client_params": {
1917
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1918
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1919
+ },
1920
+ },
1921
+ )
1922
+ class MyFlow(FlowSpec):
1923
+
1924
+ @checkpoint
1925
+ @step
1926
+ def start(self):
1927
+ with open("my_file.txt", "w") as f:
1928
+ f.write("Hello, World!")
1929
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1930
+ self.next(self.end)
1919
1931
 
1932
+ ```
1920
1933
 
1921
- Parameters
1922
- ----------
1923
- timeout : int
1924
- Time, in seconds before the task times out and fails. (Default: 3600)
1925
- poke_interval : int
1926
- Time in seconds that the job should wait in between each try. (Default: 60)
1927
- mode : str
1928
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1929
- exponential_backoff : bool
1930
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1931
- pool : str
1932
- the slot pool this task should run in,
1933
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1934
- soft_fail : bool
1935
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1936
- name : str
1937
- Name of the sensor on Airflow
1938
- description : str
1939
- Description of sensor in the Airflow UI
1940
- external_dag_id : str
1941
- The dag_id that contains the task you want to wait for.
1942
- external_task_ids : List[str]
1943
- The list of task_ids that you want to wait for.
1944
- If None (default value) the sensor waits for the DAG. (Default: None)
1945
- allowed_states : List[str]
1946
- Iterable of allowed states, (Default: ['success'])
1947
- failed_states : List[str]
1948
- Iterable of failed or dis-allowed states. (Default: None)
1949
- execution_delta : datetime.timedelta
1950
- time difference with the previous execution to look at,
1951
- the default is the same logical date as the current task or DAG. (Default: None)
1952
- check_existence: bool
1953
- Set to True to check if the external task exists or check if
1954
- the DAG to wait for exists. (Default: True)
1955
- """
1956
- ...
1957
-
1958
- @typing.overload
1959
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1960
- """
1961
- Specifies the PyPI packages for all steps of the flow.
1934
+ - Accessing objects stored in external datastores after task execution.
1962
1935
 
1963
- Use `@pypi_base` to set common packages required by all
1964
- steps and use `@pypi` to specify step-specific overrides.
1936
+ ```python
1937
+ run = Run("CheckpointsTestsFlow/8992")
1938
+ with artifact_store_from(run=run, config={
1939
+ "client_params": {
1940
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1941
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1942
+ },
1943
+ }):
1944
+ with Checkpoint() as cp:
1945
+ latest = cp.list(
1946
+ task=run["start"].task
1947
+ )[0]
1948
+ print(latest)
1949
+ cp.load(
1950
+ latest,
1951
+ "test-checkpoints"
1952
+ )
1965
1953
 
1966
- Parameters
1954
+ task = Task("TorchTuneFlow/8484/train/53673")
1955
+ with artifact_store_from(run=run, config={
1956
+ "client_params": {
1957
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1958
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1959
+ },
1960
+ }):
1961
+ load_model(
1962
+ task.data.model_ref,
1963
+ "test-models"
1964
+ )
1965
+ ```
1966
+ Parameters:
1967
1967
  ----------
1968
- packages : Dict[str, str], default: {}
1969
- Packages to use for this flow. The key is the name of the package
1970
- and the value is the version to use.
1971
- python : str, optional, default: None
1972
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1973
- that the version used will correspond to the version of the Python interpreter used to start the run.
1974
- """
1975
- ...
1976
-
1977
- @typing.overload
1978
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1979
- ...
1980
-
1981
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1982
- """
1983
- Specifies the PyPI packages for all steps of the flow.
1984
1968
 
1985
- Use `@pypi_base` to set common packages required by all
1986
- steps and use `@pypi` to specify step-specific overrides.
1969
+ type: str
1970
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1987
1971
 
1988
- Parameters
1989
- ----------
1990
- packages : Dict[str, str], default: {}
1991
- Packages to use for this flow. The key is the name of the package
1992
- and the value is the version to use.
1993
- python : str, optional, default: None
1994
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1995
- that the version used will correspond to the version of the Python interpreter used to start the run.
1972
+ config: dict or Callable
1973
+ Dictionary of configuration options for the datastore. The following keys are required:
1974
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1975
+ - example: 's3://bucket-name/path/to/root'
1976
+ - example: 'gs://bucket-name/path/to/root'
1977
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1978
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1979
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1980
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1996
1981
  """
1997
1982
  ...
1998
1983
 
1999
1984
  @typing.overload
2000
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1985
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
2001
1986
  """
2002
- Specifies the Conda environment for all steps of the flow.
1987
+ Specifies the flow(s) that this flow depends on.
2003
1988
 
2004
- Use `@conda_base` to set common libraries required by all
2005
- steps and use `@conda` to specify step-specific additions.
1989
+ ```
1990
+ @trigger_on_finish(flow='FooFlow')
1991
+ ```
1992
+ or
1993
+ ```
1994
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1995
+ ```
1996
+ This decorator respects the @project decorator and triggers the flow
1997
+ when upstream runs within the same namespace complete successfully
1998
+
1999
+ Additionally, you can specify project aware upstream flow dependencies
2000
+ by specifying the fully qualified project_flow_name.
2001
+ ```
2002
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
2003
+ ```
2004
+ or
2005
+ ```
2006
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
2007
+ ```
2008
+
2009
+ You can also specify just the project or project branch (other values will be
2010
+ inferred from the current project or project branch):
2011
+ ```
2012
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
2013
+ ```
2014
+
2015
+ Note that `branch` is typically one of:
2016
+ - `prod`
2017
+ - `user.bob`
2018
+ - `test.my_experiment`
2019
+ - `prod.staging`
2006
2020
 
2007
2021
 
2008
2022
  Parameters
2009
2023
  ----------
2010
- packages : Dict[str, str], default {}
2011
- Packages to use for this flow. The key is the name of the package
2012
- and the value is the version to use.
2013
- libraries : Dict[str, str], default {}
2014
- Supported for backward compatibility. When used with packages, packages will take precedence.
2015
- python : str, optional, default None
2016
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
2017
- that the version used will correspond to the version of the Python interpreter used to start the run.
2018
- disabled : bool, default False
2019
- If set to True, disables Conda.
2024
+ flow : Union[str, Dict[str, str]], optional, default None
2025
+ Upstream flow dependency for this flow.
2026
+ flows : List[Union[str, Dict[str, str]]], default []
2027
+ Upstream flow dependencies for this flow.
2028
+ options : Dict[str, Any], default {}
2029
+ Backend-specific configuration for tuning eventing behavior.
2020
2030
  """
2021
2031
  ...
2022
2032
 
2023
2033
  @typing.overload
2024
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
2034
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
2025
2035
  ...
2026
2036
 
2027
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
2037
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
2028
2038
  """
2029
- Specifies the Conda environment for all steps of the flow.
2039
+ Specifies the flow(s) that this flow depends on.
2030
2040
 
2031
- Use `@conda_base` to set common libraries required by all
2032
- steps and use `@conda` to specify step-specific additions.
2041
+ ```
2042
+ @trigger_on_finish(flow='FooFlow')
2043
+ ```
2044
+ or
2045
+ ```
2046
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
2047
+ ```
2048
+ This decorator respects the @project decorator and triggers the flow
2049
+ when upstream runs within the same namespace complete successfully
2033
2050
 
2051
+ Additionally, you can specify project aware upstream flow dependencies
2052
+ by specifying the fully qualified project_flow_name.
2053
+ ```
2054
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
2055
+ ```
2056
+ or
2057
+ ```
2058
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
2059
+ ```
2034
2060
 
2035
- Parameters
2036
- ----------
2037
- packages : Dict[str, str], default {}
2038
- Packages to use for this flow. The key is the name of the package
2039
- and the value is the version to use.
2040
- libraries : Dict[str, str], default {}
2041
- Supported for backward compatibility. When used with packages, packages will take precedence.
2042
- python : str, optional, default None
2043
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
2044
- that the version used will correspond to the version of the Python interpreter used to start the run.
2045
- disabled : bool, default False
2046
- If set to True, disables Conda.
2047
- """
2048
- ...
2049
-
2050
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
2051
- """
2052
- Specifies what flows belong to the same project.
2061
+ You can also specify just the project or project branch (other values will be
2062
+ inferred from the current project or project branch):
2063
+ ```
2064
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
2065
+ ```
2053
2066
 
2054
- A project-specific namespace is created for all flows that
2055
- use the same `@project(name)`.
2067
+ Note that `branch` is typically one of:
2068
+ - `prod`
2069
+ - `user.bob`
2070
+ - `test.my_experiment`
2071
+ - `prod.staging`
2056
2072
 
2057
2073
 
2058
2074
  Parameters
2059
2075
  ----------
2060
- name : str
2061
- Project name. Make sure that the name is unique amongst all
2062
- projects that use the same production scheduler. The name may
2063
- contain only lowercase alphanumeric characters and underscores.
2064
-
2065
- branch : Optional[str], default None
2066
- The branch to use. If not specified, the branch is set to
2067
- `user.<username>` unless `production` is set to `True`. This can
2068
- also be set on the command line using `--branch` as a top-level option.
2069
- It is an error to specify `branch` in the decorator and on the command line.
2070
-
2071
- production : bool, default False
2072
- Whether or not the branch is the production branch. This can also be set on the
2073
- command line using `--production` as a top-level option. It is an error to specify
2074
- `production` in the decorator and on the command line.
2075
- The project branch name will be:
2076
- - if `branch` is specified:
2077
- - if `production` is True: `prod.<branch>`
2078
- - if `production` is False: `test.<branch>`
2079
- - if `branch` is not specified:
2080
- - if `production` is True: `prod`
2081
- - if `production` is False: `user.<username>`
2076
+ flow : Union[str, Dict[str, str]], optional, default None
2077
+ Upstream flow dependency for this flow.
2078
+ flows : List[Union[str, Dict[str, str]]], default []
2079
+ Upstream flow dependencies for this flow.
2080
+ options : Dict[str, Any], default {}
2081
+ Backend-specific configuration for tuning eventing behavior.
2082
2082
  """
2083
2083
  ...
2084
2084