ob-metaflow-stubs 6.0.6.1__py2.py3-none-any.whl → 6.0.6.2__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (261)
  1. metaflow-stubs/__init__.pyi +951 -951
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +50 -50
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +3 -3
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +4 -4
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +3 -3
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +3 -3
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +2 -2
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +4 -4
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +4 -4
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +3 -3
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +3 -3
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  116. metaflow-stubs/multicore_utils.pyi +2 -2
  117. metaflow-stubs/ob_internal.pyi +2 -2
  118. metaflow-stubs/packaging_sys/__init__.pyi +7 -7
  119. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +5 -5
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +3 -3
  122. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  123. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  124. metaflow-stubs/parameters.pyi +4 -4
  125. metaflow-stubs/plugins/__init__.pyi +13 -13
  126. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  128. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  134. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  135. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  141. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  142. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  157. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  164. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  165. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  166. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  178. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  179. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  180. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  181. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  186. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  187. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  188. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  194. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  207. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  208. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  209. metaflow-stubs/plugins/perimeters.pyi +2 -2
  210. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  211. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  212. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  213. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  214. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  215. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  218. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  219. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  220. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  222. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  223. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  224. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  226. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  227. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  228. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  229. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  230. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  231. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  233. metaflow-stubs/profilers/__init__.pyi +2 -2
  234. metaflow-stubs/pylint_wrapper.pyi +2 -2
  235. metaflow-stubs/runner/__init__.pyi +2 -2
  236. metaflow-stubs/runner/deployer.pyi +5 -5
  237. metaflow-stubs/runner/deployer_impl.pyi +10 -2
  238. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  239. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  240. metaflow-stubs/runner/nbrun.pyi +2 -2
  241. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  242. metaflow-stubs/runner/utils.pyi +3 -3
  243. metaflow-stubs/system/__init__.pyi +2 -2
  244. metaflow-stubs/system/system_logger.pyi +3 -3
  245. metaflow-stubs/system/system_monitor.pyi +2 -2
  246. metaflow-stubs/tagging_util.pyi +2 -2
  247. metaflow-stubs/tuple_util.pyi +2 -2
  248. metaflow-stubs/user_configs/__init__.pyi +2 -2
  249. metaflow-stubs/user_configs/config_options.pyi +3 -3
  250. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  251. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  252. metaflow-stubs/user_decorators/common.pyi +2 -2
  253. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  254. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  255. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  256. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  257. {ob_metaflow_stubs-6.0.6.1.dist-info → ob_metaflow_stubs-6.0.6.2.dist-info}/METADATA +1 -1
  258. ob_metaflow_stubs-6.0.6.2.dist-info/RECORD +261 -0
  259. ob_metaflow_stubs-6.0.6.1.dist-info/RECORD +0 -261
  260. {ob_metaflow_stubs-6.0.6.1.dist-info → ob_metaflow_stubs-6.0.6.2.dist-info}/WHEEL +0 -0
  261. {ob_metaflow_stubs-6.0.6.1.dist-info → ob_metaflow_stubs-6.0.6.2.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.17.0.1+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-08-06T15:19:48.623789 #
+ # MF version: 2.17.1.0+obcheckpoint(0.2.4);ob(v1) #
+ # Generated on 2025-08-11T21:26:28.605682 #
  ######################################################################################################

  from __future__ import annotations
@@ -39,17 +39,17 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
+ from . import metaflow_git as metaflow_git
  from . import tuple_util as tuple_util
  from . import cards as cards
- from . import metaflow_git as metaflow_git
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from . import client as client
  from .client.core import namespace as namespace
@@ -168,288 +168,108 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Enables loading / saving of models within a step.
-
- > Examples
- - Saving Models
- ```python
- @model
- @step
- def train(self):
- # current.model.save returns a dictionary reference to the model saved
- self.my_model = current.model.save(
- path_to_my_model,
- label="my_model",
- metadata={
- "epochs": 10,
- "batch-size": 32,
- "learning-rate": 0.001,
- }
- )
- self.next(self.test)
-
- @model(load="my_model")
- @step
- def test(self):
- # `current.model.loaded` returns a dictionary of the loaded models
- # where the key is the name of the artifact and the value is the path to the model
- print(os.listdir(current.model.loaded["my_model"]))
- self.next(self.end)
- ```
-
- - Loading models
- ```python
- @step
- def train(self):
- # current.model.load returns the path to the model loaded
- checkpoint_path = current.model.load(
- self.checkpoint_key,
- )
- model_path = current.model.load(
- self.model,
- )
- self.next(self.test)
- ```
-
-
- Parameters
- ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
- temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Enables loading / saving of models within a step.
-
- > Examples
- - Saving Models
- ```python
- @model
- @step
- def train(self):
- # current.model.save returns a dictionary reference to the model saved
- self.my_model = current.model.save(
- path_to_my_model,
- label="my_model",
- metadata={
- "epochs": 10,
- "batch-size": 32,
- "learning-rate": 0.001,
- }
- )
- self.next(self.test)
-
- @model(load="my_model")
- @step
- def test(self):
- # `current.model.loaded` returns a dictionary of the loaded models
- # where the key is the name of the artifact and the value is the path to the model
- print(os.listdir(current.model.loaded["my_model"]))
- self.next(self.end)
- ```
-
- - Loading models
- ```python
- @step
- def train(self):
- # current.model.load returns the path to the model loaded
- checkpoint_path = current.model.load(
- self.checkpoint_key,
- )
- model_path = current.model.load(
- self.model,
- )
- self.next(self.test)
- ```
-
-
- Parameters
- ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
- temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on DGX cloud.
-
-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ Internal decorator to support Fast bakery
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ Internal decorator to support Fast bakery
  """
  ...

@@ -600,106 +420,169 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
  """
  ...

- @typing.overload
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- A simple decorator that demonstrates using CardDecoratorInjector
- to inject a card and render simple markdown content.
+ S3 Proxy decorator for routing S3 requests through a local proxy service.
+
+
+ Parameters
+ ----------
+ integration_name : str, optional
+ Name of the S3 proxy integration. If not specified, will use the only
+ available S3 proxy integration in the namespace (fails if multiple exist).
+ write_mode : str, optional
+ The desired behavior during write operations to target (origin) S3 bucket.
+ allowed options are:
+ "origin-and-cache" -> write to both the target S3 bucket and local object
+ storage
+ "origin" -> only write to the target S3 bucket
+ "cache" -> only write to the object storage service used for caching
+ debug : bool, optional
+ Enable debug logging for proxy operations.
  """
  ...

- @typing.overload
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- A simple decorator that demonstrates using CardDecoratorInjector
- to inject a card and render simple markdown content.
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
+
+ User code call
+ --------------
+ @vllm(
+ model="...",
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+
+ Valid model options
+ -------------------
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
+
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
+ If you need multiple models, you must create multiple @vllm decorators.
+
+
+ Parameters
+ ----------
+ model: str
+ HuggingFace model identifier to be served by vLLM.
+ backend: str
+ Determines where and how to run the vLLM process.
+ openai_api_server: bool
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
+ Default is False (uses native engine).
+ Set to True for backward compatibility with existing code.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ card_refresh_interval: int
+ Interval in seconds for refreshing the vLLM status card.
+ Only used when openai_api_server=True.
+ max_retries: int
+ Maximum number of retries checking for vLLM server startup.
+ Only used when openai_api_server=True.
+ retry_alert_frequency: int
+ Frequency of alert logs for vLLM server startup retries.
+ Only used when openai_api_server=True.
+ engine_args : dict
+ Additional keyword arguments to pass to the vLLM engine.
+ For example, `tensor_parallel_size=2`.
  """
  ...
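A sketch of how the `@vllm` decorator documented above is attached to a step. The arguments mirror the stub signature; the model name comes from the docstring's example, the remaining values are illustrative, and how the step talks to the sidecar is outside the scope of this stub:

```python
from metaflow import FlowSpec, step, vllm


class VLLMSidecarFlow(FlowSpec):

    @vllm(
        model="meta-llama/Llama-3.2-1B",   # HuggingFace identifier, as in the docstring example
        backend="local",                   # the only documented backend option
        openai_api_server=True,            # run the OpenAI-compatible server as a subprocess
        debug=False,
        card_refresh_interval=10,          # card/retry settings below are illustrative values
        max_retries=30,
        retry_alert_frequency=5,
        engine_args={"tensor_parallel_size": 1},
    )
    @step
    def start(self):
        # The vLLM sidecar serves the model for the duration of this task;
        # querying it (e.g. through an OpenAI-compatible client) is not shown here.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    VLLMSidecarFlow()
```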
 
  @typing.overload
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
- It exists to make it easier for users to know that this decorator should only be used with
- a Neo Cloud like Nebius.
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
- It exists to make it easier for users to know that this decorator should only be used with
- a Neo Cloud like Nebius.
- """
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies a timeout for your step.
+ Specifies that the step will success under all circumstances.

- This decorator is useful if this step may hang indefinitely.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...
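A minimal sketch combining the `@catch` and `@secrets` decorators documented above, following their docstrings; the secret source and environment variable names are hypothetical:

```python
import os

from metaflow import FlowSpec, step, catch, secrets


class RobustSecretsFlow(FlowSpec):

    @secrets(sources=["my-db-credentials"])      # secret spec name is hypothetical
    @catch(var="start_error", print_exception=True)
    @step
    def start(self):
        # Secrets are injected as environment variables before user code runs.
        token = os.environ["MY_API_TOKEN"]       # hypothetical variable; raises KeyError if missing
        self.token_length = len(token)           # happy-path artifact
        self.next(self.end)

    @step
    def end(self):
        # If start failed, @catch stored the exception instead of failing the run.
        if getattr(self, "start_error", None):
            print("start step failed:", self.start_error)


if __name__ == "__main__":
    RobustSecretsFlow()
```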

- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on DGX cloud.

@@ -710,109 +593,82 @@ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[t
  Number of GPUs to use.
  gpu_type : str
  Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
  """
  ...

- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ cache_update_policy: str
+ Cache update policy: "auto", "force", or "never".
+ force_cache_update: bool
+ Simple override for "force" cache update policy.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ circuit_breaker_config: dict
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+ timeout_config: dict
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
  """
  ...
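A usage sketch for the `@ollama` decorator documented above. The model name comes from the docstring's examples and the config dict keys follow the docstring; the numeric values are illustrative:

```python
from metaflow import FlowSpec, step, ollama


class OllamaSidecarFlow(FlowSpec):

    @ollama(
        models=["llama3.2"],                 # any model listed on https://ollama.com/search
        backend="local",                     # 'managed' and 'remote' are marked TODO in the docstring
        force_pull=False,
        cache_update_policy="auto",
        force_cache_update=False,
        debug=False,
        circuit_breaker_config={"failure_threshold": 3, "recovery_timeout": 60, "reset_timeout": 300},
        timeout_config={"pull": 600, "stop": 30, "health_check": 10, "install": 300, "server_startup": 120},
    )
    @step
    def start(self):
        # Ollama serves the listed models as sidecars for the duration of this task.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaSidecarFlow()
```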
741
641
 
742
642
  @typing.overload
743
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
744
- ...
745
-
746
- @typing.overload
747
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
748
- ...
749
-
750
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
643
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
751
644
  """
752
- Specifies the number of times the task corresponding
753
- to a step needs to be retried.
754
-
755
- This decorator is useful for handling transient errors, such as networking issues.
756
- If your task contains operations that can't be retried safely, e.g. database updates,
757
- it is advisable to annotate it with `@retry(times=0)`.
645
+ Creates a human-readable report, a Metaflow Card, after this step completes.
758
646
 
759
- This can be used in conjunction with the `@catch` decorator. The `@catch`
760
- decorator will execute a no-op task after all retries have been exhausted,
761
- ensuring that the flow execution can continue.
647
+ Note that you may add multiple `@card` decorators in a step with different parameters.
762
648
 
763
649
 
764
650
  Parameters
765
651
  ----------
766
- times : int, default 3
767
- Number of times to retry this task.
768
- minutes_between_retries : int, default 2
769
- Number of minutes between retries.
652
+ type : str, default 'default'
653
+ Card type.
654
+ id : str, optional, default None
655
+ If multiple cards are present, use this id to identify this card.
656
+ options : Dict[str, Any], default {}
657
+ Options passed to the card. The contents depend on the card type.
658
+ timeout : int, default 45
659
+ Interrupt reporting if it takes more than this many seconds.
770
660
  """
771
661
  ...
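A minimal sketch of the `@card` parameters described above, assuming the standard `metaflow.cards` components are available; the card id and markdown content are illustrative:

```python
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown

class CardFlow(FlowSpec):

    @card(type="default", id="report", timeout=45)
    @step
    def start(self):
        # Components appended here are rendered into the card once the step completes.
        current.card["report"].append(Markdown("# Training summary"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardFlow()
```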
772
662
 
773
663
  @typing.overload
774
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
775
- """
776
- Decorator prototype for all step decorators. This function gets specialized
777
- and imported for all decorators types by _import_plugin_decorators().
778
- """
664
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
779
665
  ...
780
666
 
781
667
  @typing.overload
782
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
783
- ...
784
-
785
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
786
- """
787
- Decorator prototype for all step decorators. This function gets specialized
788
- and imported for all decorators types by _import_plugin_decorators().
789
- """
790
- ...
791
-
792
- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
793
- """
794
- S3 Proxy decorator for routing S3 requests through a local proxy service.
795
-
796
-
797
- Parameters
798
- ----------
799
- integration_name : str, optional
800
- Name of the S3 proxy integration. If not specified, will use the only
801
- available S3 proxy integration in the namespace (fails if multiple exist).
802
- write_mode : str, optional
803
- The desired behavior during write operations to target (origin) S3 bucket.
804
- allowed options are:
805
- "origin-and-cache" -> write to both the target S3 bucket and local object
806
- storage
807
- "origin" -> only write to the target S3 bucket
808
- "cache" -> only write to the object storage service used for caching
809
- debug : bool, optional
810
- Enable debug logging for proxy operations.
811
- """
668
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
812
669
  ...
813
670
 
814
- @typing.overload
815
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
671
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
816
672
  """
817
673
  Creates a human-readable report, a Metaflow Card, after this step completes.
818
674
 
@@ -833,160 +689,155 @@ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typ
833
689
  ...
834
690
 
835
691
  @typing.overload
836
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
837
- ...
838
-
839
- @typing.overload
840
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
841
- ...
842
-
843
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
692
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
844
693
  """
845
- Creates a human-readable report, a Metaflow Card, after this step completes.
694
+ Specifies the resources needed when executing this step.
846
695
 
847
- Note that you may add multiple `@card` decorators in a step with different parameters.
696
+ Use `@resources` to specify the resource requirements
697
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
698
+
699
+ You can choose the compute layer on the command line by executing e.g.
700
+ ```
701
+ python myflow.py run --with batch
702
+ ```
703
+ or
704
+ ```
705
+ python myflow.py run --with kubernetes
706
+ ```
707
+ which executes the flow on the desired system using the
708
+ requirements specified in `@resources`.
848
709
 
849
710
 
850
711
  Parameters
851
712
  ----------
852
- type : str, default 'default'
853
- Card type.
854
- id : str, optional, default None
855
- If multiple cards are present, use this id to identify this card.
856
- options : Dict[str, Any], default {}
857
- Options passed to the card. The contents depend on the card type.
858
- timeout : int, default 45
859
- Interrupt reporting if it takes more than this many seconds.
713
+ cpu : int, default 1
714
+ Number of CPUs required for this step.
715
+ gpu : int, optional, default None
716
+ Number of GPUs required for this step.
717
+ disk : int, optional, default None
718
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
719
+ memory : int, default 4096
720
+ Memory size (in MB) required for this step.
721
+ shared_memory : int, optional, default None
722
+ The value for the size (in MiB) of the /dev/shm volume for this step.
723
+ This parameter maps to the `--shm-size` option in Docker.
860
724
  """
861
725
  ...
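A short sketch of `@resources` as described above; the sizes are illustrative and only take effect when paired with a compute layer:

```python
from metaflow import FlowSpec, step, resources

class BigStepFlow(FlowSpec):

    @resources(cpu=4, memory=16384, disk=20480)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    BigStepFlow()
```

Running `python bigstep_flow.py run --with kubernetes` (or `--with batch`) applies these requirements on the chosen layer; a plain local run does not enforce them.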
862
726
 
863
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
727
+ @typing.overload
728
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
729
+ ...
730
+
731
+ @typing.overload
732
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
733
+ ...
734
+
735
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
864
736
  """
865
- Decorator that helps cache, version and store models/datasets from huggingface hub.
866
-
867
- > Examples
737
+ Specifies the resources needed when executing this step.
868
738
 
869
- **Usage: creating references of models from huggingface that may be loaded in downstream steps**
870
- ```python
871
- @huggingface_hub
872
- @step
873
- def pull_model_from_huggingface(self):
874
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
875
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
876
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
877
- # value of the function is a reference to the model in the backend storage.
878
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
739
+ Use `@resources` to specify the resource requirements
740
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
879
741
 
880
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
881
- self.llama_model = current.huggingface_hub.snapshot_download(
882
- repo_id=self.model_id,
883
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
884
- )
885
- self.next(self.train)
742
+ You can choose the compute layer on the command line by executing e.g.
886
743
  ```
887
-
888
- **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
889
- ```python
890
- @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
891
- @step
892
- def pull_model_from_huggingface(self):
893
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
744
+ python myflow.py run --with batch
894
745
  ```
895
-
896
- ```python
897
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
898
- @step
899
- def finetune_model(self):
900
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
901
- # path_to_model will be /my-directory
746
+ or
902
747
  ```
903
-
904
- ```python
905
- # Takes all the arguments passed to `snapshot_download`
906
- # except for `local_dir`
907
- @huggingface_hub(load=[
908
- {
909
- "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
910
- },
911
- {
912
- "repo_id": "myorg/mistral-lora",
913
- "repo_type": "model",
914
- },
915
- ])
916
- @step
917
- def finetune_model(self):
918
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
919
- # path_to_model will be /my-directory
748
+ python myflow.py run --with kubernetes
920
749
  ```
750
+ which executes the flow on the desired system using the
751
+ requirements specified in `@resources`.
921
752
 
922
753
 
923
754
  Parameters
924
755
  ----------
925
- temp_dir_root : str, optional
926
- The root directory that will hold the temporary directory where objects will be downloaded.
756
+ cpu : int, default 1
757
+ Number of CPUs required for this step.
758
+ gpu : int, optional, default None
759
+ Number of GPUs required for this step.
760
+ disk : int, optional, default None
761
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
762
+ memory : int, default 4096
763
+ Memory size (in MB) required for this step.
764
+ shared_memory : int, optional, default None
765
+ The value for the size (in MiB) of the /dev/shm volume for this step.
766
+ This parameter maps to the `--shm-size` option in Docker.
767
+ """
768
+ ...
769
+
770
+ @typing.overload
771
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
772
+ """
773
+ A simple decorator that demonstrates using CardDecoratorInjector
774
+ to inject a card and render simple markdown content.
775
+ """
776
+ ...
777
+
778
+ @typing.overload
779
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
780
+ ...
781
+
782
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
783
+ """
784
+ A simple decorator that demonstrates using CardDecoratorInjector
785
+ to inject a card and render simple markdown content.
786
+ """
787
+ ...
788
+
789
+ @typing.overload
790
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
791
+ """
792
+ Specifies the number of times the task corresponding
793
+ to a step needs to be retried.
927
794
 
928
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
929
- The list of repos (models/datasets) to load.
795
+ This decorator is useful for handling transient errors, such as networking issues.
796
+ If your task contains operations that can't be retried safely, e.g. database updates,
797
+ it is advisable to annotate it with `@retry(times=0)`.
930
798
 
931
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
799
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
800
+ decorator will execute a no-op task after all retries have been exhausted,
801
+ ensuring that the flow execution can continue.
932
802
 
933
- - If repo (model/dataset) is not found in the datastore:
934
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
935
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
936
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
937
803
 
938
- - If repo is found in the datastore:
939
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
804
+ Parameters
805
+ ----------
806
+ times : int, default 3
807
+ Number of times to retry this task.
808
+ minutes_between_retries : int, default 2
809
+ Number of minutes between retries.
940
810
  """
941
811
  ...
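A hedged sketch combining `@retry` with `@catch` as the docstring above suggests; the step and artifact names are illustrative:

```python
from metaflow import FlowSpec, step, retry, catch

class RetryFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.fetch)

    @catch(var="fetch_error")                      # after retries are exhausted, continue with a no-op task
    @retry(times=3, minutes_between_retries=2)
    @step
    def fetch(self):
        # Transient failures (e.g. network errors) raised here are retried up to 3 times.
        self.next(self.end)

    @step
    def end(self):
        print("caught:", getattr(self, "fetch_error", None))


if __name__ == "__main__":
    RetryFlow()
```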
942
812
 
943
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
813
+ @typing.overload
814
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
815
+ ...
816
+
817
+ @typing.overload
818
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
819
+ ...
820
+
821
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
944
822
  """
945
- This decorator is used to run vllm APIs as Metaflow task sidecars.
946
-
947
- User code call
948
- --------------
949
- @vllm(
950
- model="...",
951
- ...
952
- )
953
-
954
- Valid backend options
955
- ---------------------
956
- - 'local': Run as a separate process on the local task machine.
823
+ Specifies the number of times the task corresponding
824
+ to a step needs to be retried.
957
825
 
958
- Valid model options
959
- -------------------
960
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
826
+ This decorator is useful for handling transient errors, such as networking issues.
827
+ If your task contains operations that can't be retried safely, e.g. database updates,
828
+ it is advisable to annotate it with `@retry(times=0)`.
961
829
 
962
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
963
- If you need multiple models, you must create multiple @vllm decorators.
830
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
831
+ decorator will execute a no-op task after all retries have been exhausted,
832
+ ensuring that the flow execution can continue.
964
833
 
965
834
 
966
835
  Parameters
967
836
  ----------
968
- model: str
969
- HuggingFace model identifier to be served by vLLM.
970
- backend: str
971
- Determines where and how to run the vLLM process.
972
- openai_api_server: bool
973
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
974
- Default is False (uses native engine).
975
- Set to True for backward compatibility with existing code.
976
- debug: bool
977
- Whether to turn on verbose debugging logs.
978
- card_refresh_interval: int
979
- Interval in seconds for refreshing the vLLM status card.
980
- Only used when openai_api_server=True.
981
- max_retries: int
982
- Maximum number of retries checking for vLLM server startup.
983
- Only used when openai_api_server=True.
984
- retry_alert_frequency: int
985
- Frequency of alert logs for vLLM server startup retries.
986
- Only used when openai_api_server=True.
987
- engine_args : dict
988
- Additional keyword arguments to pass to the vLLM engine.
989
- For example, `tensor_parallel_size=2`.
837
+ times : int, default 3
838
+ Number of times to retry this task.
839
+ minutes_between_retries : int, default 2
840
+ Number of minutes between retries.
990
841
  """
991
842
  ...
992
843
 
@@ -1080,207 +931,399 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
1080
931
  ...
1081
932
 
1082
933
  @typing.overload
1083
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
934
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1084
935
  """
1085
- Decorator prototype for all step decorators. This function gets specialized
1086
- and imported for all decorators types by _import_plugin_decorators().
936
+ Specifies the Conda environment for the step.
937
+
938
+ Information in this decorator will augment any
939
+ attributes set in the `@conda_base` flow-level decorator. Hence,
940
+ you can use `@conda_base` to set packages required by all
941
+ steps and use `@conda` to specify step-specific overrides.
942
+
943
+
944
+ Parameters
945
+ ----------
946
+ packages : Dict[str, str], default {}
947
+ Packages to use for this step. The key is the name of the package
948
+ and the value is the version to use.
949
+ libraries : Dict[str, str], default {}
950
+ Supported for backward compatibility. When used with packages, packages will take precedence.
951
+ python : str, optional, default None
952
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
953
+ that the version used will correspond to the version of the Python interpreter used to start the run.
954
+ disabled : bool, default False
955
+ If set to True, disables @conda.
1087
956
  """
1088
957
  ...
1089
958
 
1090
959
  @typing.overload
1091
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
960
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1092
961
  ...
1093
962
 
1094
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1095
- """
1096
- Decorator prototype for all step decorators. This function gets specialized
1097
- and imported for all decorators types by _import_plugin_decorators().
1098
- """
963
+ @typing.overload
964
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1099
965
  ...
1100
966
 
1101
- @typing.overload
1102
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
967
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1103
968
  """
1104
- Specifies secrets to be retrieved and injected as environment variables prior to
1105
- the execution of a step.
969
+ Specifies the Conda environment for the step.
970
+
971
+ Information in this decorator will augment any
972
+ attributes set in the `@conda_base` flow-level decorator. Hence,
973
+ you can use `@conda_base` to set packages required by all
974
+ steps and use `@conda` to specify step-specific overrides.
1106
975
 
1107
976
 
1108
977
  Parameters
1109
978
  ----------
1110
- sources : List[Union[str, Dict[str, Any]]], default: []
1111
- List of secret specs, defining how the secrets are to be retrieved
1112
- role : str, optional, default: None
1113
- Role to use for fetching secrets
979
+ packages : Dict[str, str], default {}
980
+ Packages to use for this step. The key is the name of the package
981
+ and the value is the version to use.
982
+ libraries : Dict[str, str], default {}
983
+ Supported for backward compatibility. When used with packages, packages will take precedence.
984
+ python : str, optional, default None
985
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
986
+ that the version used will correspond to the version of the Python interpreter used to start the run.
987
+ disabled : bool, default False
988
+ If set to True, disables @conda.
1114
989
  """
1115
990
  ...
1116
991
 
1117
992
  @typing.overload
1118
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
993
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
994
+ """
995
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
996
+ It exists to make it easier for users to know that this decorator should only be used with
997
+ a Neo Cloud like Nebius.
998
+ """
1119
999
  ...
1120
1000
 
1121
1001
  @typing.overload
1122
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1002
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1123
1003
  ...
1124
1004
 
1125
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1005
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1126
1006
  """
1127
- Specifies secrets to be retrieved and injected as environment variables prior to
1128
- the execution of a step.
1007
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1008
+ It exists to make it easier for users to know that this decorator should only be used with
1009
+ a Neo Cloud like Nebius.
1010
+ """
1011
+ ...
1012
+
1013
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1014
+ """
1015
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
1016
+
1017
+ > Examples
1018
+
1019
+ **Usage: creating references of models from huggingface that may be loaded in downstream steps**
1020
+ ```python
1021
+ @huggingface_hub
1022
+ @step
1023
+ def pull_model_from_huggingface(self):
1024
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
1025
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
1026
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
1027
+ # value of the function is a reference to the model in the backend storage.
1028
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
1029
+
1030
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
1031
+ self.llama_model = current.huggingface_hub.snapshot_download(
1032
+ repo_id=self.model_id,
1033
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
1034
+ )
1035
+ self.next(self.train)
1036
+ ```
1037
+
1038
+ **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
1039
+ ```python
1040
+ @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
1041
+ @step
1042
+ def pull_model_from_huggingface(self):
1043
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1044
+ ```
1045
+
1046
+ ```python
1047
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
1048
+ @step
1049
+ def finetune_model(self):
1050
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1051
+ # path_to_model will be /my-directory
1052
+ ```
1053
+
1054
+ ```python
1055
+ # Takes all the arguments passed to `snapshot_download`
1056
+ # except for `local_dir`
1057
+ @huggingface_hub(load=[
1058
+ {
1059
+ "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
1060
+ },
1061
+ {
1062
+ "repo_id": "myorg/mistral-lora",
1063
+ "repo_type": "model",
1064
+ },
1065
+ ])
1066
+ @step
1067
+ def finetune_model(self):
1068
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1069
+ # path_to_model is the local directory where the snapshot was downloaded
1070
+ ```
1129
1071
 
1130
1072
 
1131
1073
  Parameters
1132
1074
  ----------
1133
- sources : List[Union[str, Dict[str, Any]]], default: []
1134
- List of secret specs, defining how the secrets are to be retrieved
1135
- role : str, optional, default: None
1136
- Role to use for fetching secrets
1075
+ temp_dir_root : str, optional
1076
+ The root directory that will hold the temporary directory where objects will be downloaded.
1077
+
1078
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
1079
+ The list of repos (models/datasets) to load.
1080
+
1081
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
1082
+
1083
+ - If repo (model/dataset) is not found in the datastore:
1084
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
1085
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
1086
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
1087
+
1088
+ - If repo is found in the datastore:
1089
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
1137
1090
  """
1138
1091
  ...
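Condensing the examples above into a single sketch, assuming `huggingface_hub` and `model` are importable from `metaflow` as these stubs declare:

```python
from metaflow import FlowSpec, step, huggingface_hub, model, current

class HFFlow(FlowSpec):

    @huggingface_hub
    @step
    def start(self):
        # Returns a reference; the snapshot is cached in Metaflow's datastore by repo_id.
        self.llama_model = current.huggingface_hub.snapshot_download(
            repo_id="mistralai/Mistral-7B-Instruct-v0.1",
            allow_patterns=["*.json", "tokenizer.*"],
        )
        self.next(self.train)

    @model(load=["llama_model"])
    @step
    def train(self):
        print(current.model.loaded["llama_model"])   # local path of the loaded snapshot
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    HFFlow()
```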
1139
1092
 
1140
- @typing.overload
1141
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1093
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1142
1094
  """
1143
- Internal decorator to support Fast bakery
1095
+ Specifies that this step should execute on DGX cloud.
1096
+
1097
+
1098
+ Parameters
1099
+ ----------
1100
+ gpu : int
1101
+ Number of GPUs to use.
1102
+ gpu_type : str
1103
+ Type of Nvidia GPU to use.
1104
+ queue_timeout : int
1105
+ Time to keep the job in NVCF's queue.
1144
1106
  """
1145
1107
  ...
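A brief sketch of `@nvidia`; the GPU type and queue timeout are placeholders that depend on what your NVCF setup offers:

```python
from metaflow import FlowSpec, step, nvidia

class DGXFlow(FlowSpec):

    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)   # values are illustrative
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DGXFlow()
```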
1146
1108
 
1147
1109
  @typing.overload
1148
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1149
- ...
1150
-
1151
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1110
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1152
1111
  """
1153
- Internal decorator to support Fast bakery
1112
+ Enables loading / saving of models within a step.
1113
+
1114
+ > Examples
1115
+ - Saving Models
1116
+ ```python
1117
+ @model
1118
+ @step
1119
+ def train(self):
1120
+ # current.model.save returns a dictionary reference to the model saved
1121
+ self.my_model = current.model.save(
1122
+ path_to_my_model,
1123
+ label="my_model",
1124
+ metadata={
1125
+ "epochs": 10,
1126
+ "batch-size": 32,
1127
+ "learning-rate": 0.001,
1128
+ }
1129
+ )
1130
+ self.next(self.test)
1131
+
1132
+ @model(load="my_model")
1133
+ @step
1134
+ def test(self):
1135
+ # `current.model.loaded` returns a dictionary of the loaded models
1136
+ # where the key is the name of the artifact and the value is the path to the model
1137
+ print(os.listdir(current.model.loaded["my_model"]))
1138
+ self.next(self.end)
1139
+ ```
1140
+
1141
+ - Loading models
1142
+ ```python
1143
+ @step
1144
+ def train(self):
1145
+ # current.model.load returns the path to the model loaded
1146
+ checkpoint_path = current.model.load(
1147
+ self.checkpoint_key,
1148
+ )
1149
+ model_path = current.model.load(
1150
+ self.model,
1151
+ )
1152
+ self.next(self.test)
1153
+ ```
1154
+
1155
+
1156
+ Parameters
1157
+ ----------
1158
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1159
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1160
+ The artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1161
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
1162
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1163
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1164
+
1165
+ temp_dir_root : str, default: None
1166
+ The root directory under which `current.model.loaded` will store loaded models
1154
1167
  """
1155
1168
  ...
1156
1169
 
1157
1170
  @typing.overload
1158
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1171
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1172
+ ...
1173
+
1174
+ @typing.overload
1175
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1176
+ ...
1177
+
1178
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1159
1179
  """
1160
- Specifies the resources needed when executing this step.
1180
+ Enables loading / saving of models within a step.
1161
1181
 
1162
- Use `@resources` to specify the resource requirements
1163
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1182
+ > Examples
1183
+ - Saving Models
1184
+ ```python
1185
+ @model
1186
+ @step
1187
+ def train(self):
1188
+ # current.model.save returns a dictionary reference to the model saved
1189
+ self.my_model = current.model.save(
1190
+ path_to_my_model,
1191
+ label="my_model",
1192
+ metadata={
1193
+ "epochs": 10,
1194
+ "batch-size": 32,
1195
+ "learning-rate": 0.001,
1196
+ }
1197
+ )
1198
+ self.next(self.test)
1164
1199
 
1165
- You can choose the compute layer on the command line by executing e.g.
1166
- ```
1167
- python myflow.py run --with batch
1168
- ```
1169
- or
1200
+ @model(load="my_model")
1201
+ @step
1202
+ def test(self):
1203
+ # `current.model.loaded` returns a dictionary of the loaded models
1204
+ # where the key is the name of the artifact and the value is the path to the model
1205
+ print(os.listdir(current.model.loaded["my_model"]))
1206
+ self.next(self.end)
1170
1207
  ```
1171
- python myflow.py run --with kubernetes
1208
+
1209
+ - Loading models
1210
+ ```python
1211
+ @step
1212
+ def train(self):
1213
+ # current.model.load returns the path to the model loaded
1214
+ checkpoint_path = current.model.load(
1215
+ self.checkpoint_key,
1216
+ )
1217
+ model_path = current.model.load(
1218
+ self.model,
1219
+ )
1220
+ self.next(self.test)
1172
1221
  ```
1173
- which executes the flow on the desired system using the
1174
- requirements specified in `@resources`.
1175
1222
 
1176
1223
 
1177
1224
  Parameters
1178
1225
  ----------
1179
- cpu : int, default 1
1180
- Number of CPUs required for this step.
1181
- gpu : int, optional, default None
1182
- Number of GPUs required for this step.
1183
- disk : int, optional, default None
1184
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1185
- memory : int, default 4096
1186
- Memory size (in MB) required for this step.
1187
- shared_memory : int, optional, default None
1188
- The value for the size (in MiB) of the /dev/shm volume for this step.
1189
- This parameter maps to the `--shm-size` option in Docker.
1226
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1227
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1228
+ The artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1229
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
1230
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1231
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1232
+
1233
+ temp_dir_root : str, default: None
1234
+ The root directory under which `current.model.loaded` will store loaded models
1190
1235
  """
1191
1236
  ...
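Folding the save/load examples above into one flow; a sketch only, where the file written in `start` stands in for real training output:

```python
import os
from metaflow import FlowSpec, step, model, current

class ModelFlow(FlowSpec):

    @model
    @step
    def start(self):
        with open("weights.bin", "wb") as f:     # placeholder for real training output
            f.write(b"\x00")
        self.my_model = current.model.save(
            "weights.bin", label="my_model", metadata={"epochs": 10}
        )
        self.next(self.test)

    @model(load="my_model")
    @step
    def test(self):
        print(os.listdir(current.model.loaded["my_model"]))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ModelFlow()
```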
1192
1237
 
1193
1238
  @typing.overload
1194
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1239
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1240
+ """
1241
+ Specifies environment variables to be set prior to the execution of a step.
1242
+
1243
+
1244
+ Parameters
1245
+ ----------
1246
+ vars : Dict[str, str], default {}
1247
+ Dictionary of environment variables to set.
1248
+ """
1195
1249
  ...
1196
1250
 
1197
1251
  @typing.overload
1198
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1252
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1199
1253
  ...
1200
1254
 
1201
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1255
+ @typing.overload
1256
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1257
+ ...
1258
+
1259
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1202
1260
  """
1203
- Specifies the resources needed when executing this step.
1204
-
1205
- Use `@resources` to specify the resource requirements
1206
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1207
-
1208
- You can choose the compute layer on the command line by executing e.g.
1209
- ```
1210
- python myflow.py run --with batch
1211
- ```
1212
- or
1213
- ```
1214
- python myflow.py run --with kubernetes
1215
- ```
1216
- which executes the flow on the desired system using the
1217
- requirements specified in `@resources`.
1261
+ Specifies environment variables to be set prior to the execution of a step.
1218
1262
 
1219
1263
 
1220
1264
  Parameters
1221
1265
  ----------
1222
- cpu : int, default 1
1223
- Number of CPUs required for this step.
1224
- gpu : int, optional, default None
1225
- Number of GPUs required for this step.
1226
- disk : int, optional, default None
1227
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1228
- memory : int, default 4096
1229
- Memory size (in MB) required for this step.
1230
- shared_memory : int, optional, default None
1231
- The value for the size (in MiB) of the /dev/shm volume for this step.
1232
- This parameter maps to the `--shm-size` option in Docker.
1266
+ vars : Dict[str, str], default {}
1267
+ Dictionary of environment variables to set.
1233
1268
  """
1234
1269
  ...
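A tiny sketch of `@environment`; the variable name is an arbitrary example:

```python
import os
from metaflow import FlowSpec, step, environment

class EnvFlow(FlowSpec):

    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        # The variable is injected before the step body runs, also on remote compute.
        print(os.environ["TOKENIZERS_PARALLELISM"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvFlow()
```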
1235
1270
 
1236
1271
  @typing.overload
1237
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1272
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1238
1273
  """
1239
- Specifies that the step will succeed under all circumstances.
1274
+ Specifies a timeout for your step.
1240
1275
 
1241
- The decorator will create an optional artifact, specified by `var`, which
1242
- contains the exception raised. You can use it to detect the presence
1243
- of errors, indicating that all happy-path artifacts produced by the step
1244
- are missing.
1276
+ This decorator is useful if this step may hang indefinitely.
1277
+
1278
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1279
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1280
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1281
+
1282
+ Note that all the values specified in parameters are added together so if you specify
1283
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1245
1284
 
1246
1285
 
1247
1286
  Parameters
1248
1287
  ----------
1249
- var : str, optional, default None
1250
- Name of the artifact in which to store the caught exception.
1251
- If not specified, the exception is not stored.
1252
- print_exception : bool, default True
1253
- Determines whether or not the exception is printed to
1254
- stdout when caught.
1288
+ seconds : int, default 0
1289
+ Number of seconds to wait prior to timing out.
1290
+ minutes : int, default 0
1291
+ Number of minutes to wait prior to timing out.
1292
+ hours : int, default 0
1293
+ Number of hours to wait prior to timing out.
1255
1294
  """
1256
1295
  ...
1257
1296
 
1258
1297
  @typing.overload
1259
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1298
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1260
1299
  ...
1261
1300
 
1262
1301
  @typing.overload
1263
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1302
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1264
1303
  ...
1265
1304
 
1266
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1305
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1267
1306
  """
1268
- Specifies that the step will succeed under all circumstances.
1307
+ Specifies a timeout for your step.
1269
1308
 
1270
- The decorator will create an optional artifact, specified by `var`, which
1271
- contains the exception raised. You can use it to detect the presence
1272
- of errors, indicating that all happy-path artifacts produced by the step
1273
- are missing.
1309
+ This decorator is useful if this step may hang indefinitely.
1310
+
1311
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1312
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1313
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1314
+
1315
+ Note that all the values specified in parameters are added together so if you specify
1316
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1274
1317
 
1275
1318
 
1276
1319
  Parameters
1277
1320
  ----------
1278
- var : str, optional, default None
1279
- Name of the artifact in which to store the caught exception.
1280
- If not specified, the exception is not stored.
1281
- print_exception : bool, default True
1282
- Determines whether or not the exception is printed to
1283
- stdout when caught.
1321
+ seconds : int, default 0
1322
+ Number of seconds to wait prior to timing out.
1323
+ minutes : int, default 0
1324
+ Number of minutes to wait prior to timing out.
1325
+ hours : int, default 0
1326
+ Number of hours to wait prior to timing out.
1284
1327
  """
1285
1328
  ...
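A sketch showing the additive timeout described above, combined with `@retry` and `@catch`; the durations and artifact name are illustrative:

```python
from metaflow import FlowSpec, step, timeout, retry, catch

class TimeoutFlow(FlowSpec):

    @catch(var="timed_out")
    @retry(times=1)
    @timeout(hours=1, minutes=30)    # durations add up: effective limit is 1h30m
    @step
    def start(self):
        # A timeout is raised as an exception, retried once, then caught by @catch.
        self.next(self.end)

    @step
    def end(self):
        print(getattr(self, "timed_out", None))


if __name__ == "__main__":
    TimeoutFlow()
```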
1286
1329
 
@@ -1305,89 +1348,89 @@ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFla
1305
1348
  """
1306
1349
  ...
1307
1350
 
1308
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1351
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1309
1352
  """
1310
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
1353
+ Specifies what flows belong to the same project.
1311
1354
 
1312
- User code call
1313
- --------------
1314
- @ollama(
1315
- models=[...],
1316
- ...
1317
- )
1355
+ A project-specific namespace is created for all flows that
1356
+ use the same `@project(name)`.
1318
1357
 
1319
- Valid backend options
1320
- ---------------------
1321
- - 'local': Run as a separate process on the local task machine.
1322
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1323
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1324
1358
 
1325
- Valid model options
1326
- -------------------
1327
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1359
+ Parameters
1360
+ ----------
1361
+ name : str
1362
+ Project name. Make sure that the name is unique amongst all
1363
+ projects that use the same production scheduler. The name may
1364
+ contain only lowercase alphanumeric characters and underscores.
1365
+
1366
+ branch : Optional[str], default None
1367
+ The branch to use. If not specified, the branch is set to
1368
+ `user.<username>` unless `production` is set to `True`. This can
1369
+ also be set on the command line using `--branch` as a top-level option.
1370
+ It is an error to specify `branch` in the decorator and on the command line.
1371
+
1372
+ production : bool, default False
1373
+ Whether or not the branch is the production branch. This can also be set on the
1374
+ command line using `--production` as a top-level option. It is an error to specify
1375
+ `production` in the decorator and on the command line.
1376
+ The project branch name will be:
1377
+ - if `branch` is specified:
1378
+ - if `production` is True: `prod.<branch>`
1379
+ - if `production` is False: `test.<branch>`
1380
+ - if `branch` is not specified:
1381
+ - if `production` is True: `prod`
1382
+ - if `production` is False: `user.<username>`
1383
+ """
1384
+ ...
1385
+
1386
+ @typing.overload
1387
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1388
+ """
1389
+ Specifies the times when the flow should be run when running on a
1390
+ production scheduler.
1328
1391
 
1329
1392
 
1330
1393
  Parameters
1331
1394
  ----------
1332
- models: list[str]
1333
- List of Ollama containers running models in sidecars.
1334
- backend: str
1335
- Determines where and how to run the Ollama process.
1336
- force_pull: bool
1337
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1338
- cache_update_policy: str
1339
- Cache update policy: "auto", "force", or "never".
1340
- force_cache_update: bool
1341
- Simple override for "force" cache update policy.
1342
- debug: bool
1343
- Whether to turn on verbose debugging logs.
1344
- circuit_breaker_config: dict
1345
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1346
- timeout_config: dict
1347
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1395
+ hourly : bool, default False
1396
+ Run the workflow hourly.
1397
+ daily : bool, default True
1398
+ Run the workflow daily.
1399
+ weekly : bool, default False
1400
+ Run the workflow weekly.
1401
+ cron : str, optional, default None
1402
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1403
+ specified by this expression.
1404
+ timezone : str, optional, default None
1405
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1406
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1348
1407
  """
1349
1408
  ...
1350
1409
 
1351
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1410
+ @typing.overload
1411
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1412
+ ...
1413
+
1414
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1352
1415
  """
1353
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1354
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1355
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1356
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1357
- starts only after all sensors finish.
1416
+ Specifies the times when the flow should be run when running on a
1417
+ production scheduler.
1358
1418
 
1359
1419
 
1360
1420
  Parameters
1361
1421
  ----------
1362
- timeout : int
1363
- Time, in seconds before the task times out and fails. (Default: 3600)
1364
- poke_interval : int
1365
- Time in seconds that the job should wait in between each try. (Default: 60)
1366
- mode : str
1367
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1368
- exponential_backoff : bool
1369
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1370
- pool : str
1371
- the slot pool this task should run in,
1372
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1373
- soft_fail : bool
1374
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1375
- name : str
1376
- Name of the sensor on Airflow
1377
- description : str
1378
- Description of sensor in the Airflow UI
1379
- bucket_key : Union[str, List[str]]
1380
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1381
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1382
- bucket_name : str
1383
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1384
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1385
- wildcard_match : bool
1386
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1387
- aws_conn_id : str
1388
- a reference to the s3 connection on Airflow. (Default: None)
1389
- verify : bool
1390
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1422
+ hourly : bool, default False
1423
+ Run the workflow hourly.
1424
+ daily : bool, default True
1425
+ Run the workflow daily.
1426
+ weekly : bool, default False
1427
+ Run the workflow weekly.
1428
+ cron : str, optional, default None
1429
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1430
+ specified by this expression.
1431
+ timezone : str, optional, default None
1432
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1433
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1391
1434
  """
1392
1435
  ...
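A sketch of `@schedule` with a cron expression; the expression and timezone are examples, and the accepted cron dialect depends on the production scheduler you deploy to:

```python
from metaflow import FlowSpec, step, schedule

@schedule(cron="0 6 * * *", timezone="Europe/Helsinki")   # 06:00 daily; illustrative values
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```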
1393
1436
 
@@ -1434,117 +1477,147 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1434
1477
  """
1435
1478
  ...
1436
1479
 
1437
- def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1480
+ @typing.overload
1481
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1438
1482
  """
1439
- Allows setting external datastores to save data for the
1440
- `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1441
-
1442
- This decorator is useful when users wish to save data to a different datastore
1443
- than what is configured in Metaflow. This can be for variety of reasons:
1444
-
1445
- 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
1446
- 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1447
- - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1448
- 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1449
- - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1483
+ Specifies the event(s) that this flow depends on.
1450
1484
 
1451
- Usage:
1452
- ----------
1485
+ ```
1486
+ @trigger(event='foo')
1487
+ ```
1488
+ or
1489
+ ```
1490
+ @trigger(events=['foo', 'bar'])
1491
+ ```
1453
1492
 
1454
- - Using a custom IAM role to access the datastore.
1493
+ Additionally, you can specify the parameter mappings
1494
+ to map event payload to Metaflow parameters for the flow.
1495
+ ```
1496
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1497
+ ```
1498
+ or
1499
+ ```
1500
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1501
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1502
+ ```
1455
1503
 
1456
- ```python
1457
- @with_artifact_store(
1458
- type="s3",
1459
- config=lambda: {
1460
- "root": "s3://my-bucket-foo/path/to/root",
1461
- "role_arn": ROLE,
1462
- },
1463
- )
1464
- class MyFlow(FlowSpec):
1504
+ 'parameters' can also be a list of strings and tuples like so:
1505
+ ```
1506
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1507
+ ```
1508
+ This is equivalent to:
1509
+ ```
1510
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1511
+ ```
1465
1512
 
1466
- @checkpoint
1467
- @step
1468
- def start(self):
1469
- with open("my_file.txt", "w") as f:
1470
- f.write("Hello, World!")
1471
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1472
- self.next(self.end)
1473
1513
 
1474
- ```
1514
+ Parameters
1515
+ ----------
1516
+ event : Union[str, Dict[str, Any]], optional, default None
1517
+ Event dependency for this flow.
1518
+ events : List[Union[str, Dict[str, Any]]], default []
1519
+ Events dependency for this flow.
1520
+ options : Dict[str, Any], default {}
1521
+ Backend-specific configuration for tuning eventing behavior.
1522
+ """
1523
+ ...
1524
+
1525
+ @typing.overload
1526
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1527
+ ...
1528
+
1529
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1530
+ """
1531
+ Specifies the event(s) that this flow depends on.
1475
1532
 
1476
- - Using credentials to access the s3-compatible datastore.
1533
+ ```
1534
+ @trigger(event='foo')
1535
+ ```
1536
+ or
1537
+ ```
1538
+ @trigger(events=['foo', 'bar'])
1539
+ ```
1477
1540
 
1478
- ```python
1479
- @with_artifact_store(
1480
- type="s3",
1481
- config=lambda: {
1482
- "root": "s3://my-bucket-foo/path/to/root",
1483
- "client_params": {
1484
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1485
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1486
- },
1487
- },
1488
- )
1489
- class MyFlow(FlowSpec):
1541
+ Additionally, you can specify the parameter mappings
1542
+ to map event payload to Metaflow parameters for the flow.
1543
+ ```
1544
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1545
+ ```
1546
+ or
1547
+ ```
1548
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1549
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1550
+ ```
1490
1551
 
1491
- @checkpoint
1492
- @step
1493
- def start(self):
1494
- with open("my_file.txt", "w") as f:
1495
- f.write("Hello, World!")
1496
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1497
- self.next(self.end)
1552
+ 'parameters' can also be a list of strings and tuples like so:
1553
+ ```
1554
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1555
+ ```
1556
+ This is equivalent to:
1557
+ ```
1558
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1559
+ ```
1498
1560
 
1499
- ```
1500
1561
 
1501
- - Accessing objects stored in external datastores after task execution.
1562
+ Parameters
1563
+ ----------
1564
+ event : Union[str, Dict[str, Any]], optional, default None
1565
+ Event dependency for this flow.
1566
+ events : List[Union[str, Dict[str, Any]]], default []
1567
+ Events dependency for this flow.
1568
+ options : Dict[str, Any], default {}
1569
+ Backend-specific configuration for tuning eventing behavior.
1570
+ """
1571
+ ...
1572
+
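As a concrete illustration of the event triggering and parameter mapping described in the docstring above, here is a minimal sketch. The event name (`transaction_scored`), the payload field (`model_score`), and the flow name are hypothetical; the trigger takes effect only once the flow is deployed to a production orchestrator.

```python
from metaflow import FlowSpec, Parameter, step, trigger


# Hypothetical event and payload field; deploy with e.g.
# `python fraud_alert_flow.py argo-workflows create` so the trigger is registered.
@trigger(event={"name": "transaction_scored", "parameters": {"score": "model_score"}})
class FraudAlertFlow(FlowSpec):

    # Populated from the 'model_score' field of the triggering event's payload;
    # falls back to the default when the flow is run manually.
    score = Parameter("score", type=float, default=0.0)

    @step
    def start(self):
        print(f"triggered with score={self.score}")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    FraudAlertFlow()
```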
1573
+ @typing.overload
1574
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1575
+ """
1576
+ Specifies the Conda environment for all steps of the flow.
1502
1577
 
1503
- ```python
1504
- run = Run("CheckpointsTestsFlow/8992")
1505
- with artifact_store_from(run=run, config={
1506
- "client_params": {
1507
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1508
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1509
- },
1510
- }):
1511
- with Checkpoint() as cp:
1512
- latest = cp.list(
1513
- task=run["start"].task
1514
- )[0]
1515
- print(latest)
1516
- cp.load(
1517
- latest,
1518
- "test-checkpoints"
1519
- )
1578
+ Use `@conda_base` to set common libraries required by all
1579
+ steps and use `@conda` to specify step-specific additions.
1520
1580
 
1521
- task = Task("TorchTuneFlow/8484/train/53673")
1522
- with artifact_store_from(run=run, config={
1523
- "client_params": {
1524
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1525
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1526
- },
1527
- }):
1528
- load_model(
1529
- task.data.model_ref,
1530
- "test-models"
1531
- )
1532
- ```
1533
- Parameters:
1581
+
1582
+ Parameters
1534
1583
  ----------
1584
+ packages : Dict[str, str], default {}
1585
+ Packages to use for this flow. The key is the name of the package
1586
+ and the value is the version to use.
1587
+ libraries : Dict[str, str], default {}
1588
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1589
+ python : str, optional, default None
1590
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1591
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1592
+ disabled : bool, default False
1593
+ If set to True, disables Conda.
1594
+ """
1595
+ ...
1596
+
1597
+ @typing.overload
1598
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1599
+ ...
1600
+
1601
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1602
+ """
1603
+ Specifies the Conda environment for all steps of the flow.
1535
1604
 
1536
- type: str
1537
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1605
+ Use `@conda_base` to set common libraries required by all
1606
+ steps and use `@conda` to specify step-specific additions.
1538
1607
 
1539
- config: dict or Callable
1540
- Dictionary of configuration options for the datastore. The following keys are required:
1541
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1542
- - example: 's3://bucket-name/path/to/root'
1543
- - example: 'gs://bucket-name/path/to/root'
1544
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1545
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1546
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1547
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1608
+
1609
+ Parameters
1610
+ ----------
1611
+ packages : Dict[str, str], default {}
1612
+ Packages to use for this flow. The key is the name of the package
1613
+ and the value is the version to use.
1614
+ libraries : Dict[str, str], default {}
1615
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1616
+ python : str, optional, default None
1617
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1618
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1619
+ disabled : bool, default False
1620
+ If set to True, disables Conda.
1548
1621
  """
1549
1622
  ...
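A minimal sketch of how the flow-wide `@conda_base` described above composes with a step-level `@conda` addition; the Python and package versions are illustrative, and older Metaflow installations may need the run to be started with `--environment=conda`.

```python
from metaflow import FlowSpec, conda, conda_base, step


# Flow-wide environment: every step gets this Python version and these packages.
@conda_base(python="3.11.9", packages={"pandas": "2.2.2"})
class CondaEnvFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the flow-wide environment
        print(pd.__version__)
        self.next(self.train)

    # Step-specific addition layered on top of the flow-wide environment.
    @conda(packages={"scikit-learn": "1.5.0"})
    @step
    def train(self):
        import sklearn
        print(sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaEnvFlow()
```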
1550
1623
 
@@ -1589,89 +1662,117 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1589
1662
  """
1590
1663
  ...
1591
1664
 
1592
- @typing.overload
1593
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1665
+ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1594
1666
  """
1595
- Specifies the times when the flow should be run when running on a
1596
- production scheduler.
1667
+ Allows setting external datastores to save data for the
1668
+ `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1597
1669
 
1670
+ This decorator is useful when users wish to save data to a different datastore
1671
+ than what is configured in Metaflow. This can be for a variety of reasons:
1598
1672
 
1599
- Parameters
1673
+ 1. Data security: The objects need to be stored in a bucket (object storage) that is not accessible by other flows.
1674
+ 2. Data Locality: The task is executing in a different region than the datastore.
1675
+ - Example: The Metaflow datastore lives in US East, but the task is executing in a datacenter in Finland.
1676
+ 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1677
+ - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1678
+
1679
+ Usage:
1600
1680
  ----------
1601
- hourly : bool, default False
1602
- Run the workflow hourly.
1603
- daily : bool, default True
1604
- Run the workflow daily.
1605
- weekly : bool, default False
1606
- Run the workflow weekly.
1607
- cron : str, optional, default None
1608
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1609
- specified by this expression.
1610
- timezone : str, optional, default None
1611
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1612
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1613
- """
1614
- ...
1615
-
1616
- @typing.overload
1617
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1618
- ...
1619
-
1620
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1621
- """
1622
- Specifies the times when the flow should be run when running on a
1623
- production scheduler.
1624
1681
 
1682
+ - Using a custom IAM role to access the datastore.
1625
1683
 
1626
- Parameters
1627
- ----------
1628
- hourly : bool, default False
1629
- Run the workflow hourly.
1630
- daily : bool, default True
1631
- Run the workflow daily.
1632
- weekly : bool, default False
1633
- Run the workflow weekly.
1634
- cron : str, optional, default None
1635
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1636
- specified by this expression.
1637
- timezone : str, optional, default None
1638
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1639
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1640
- """
1641
- ...
1642
-
1643
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1644
- """
1645
- Specifies what flows belong to the same project.
1684
+ ```python
1685
+ @with_artifact_store(
1686
+ type="s3",
1687
+ config=lambda: {
1688
+ "root": "s3://my-bucket-foo/path/to/root",
1689
+ "role_arn": ROLE,
1690
+ },
1691
+ )
1692
+ class MyFlow(FlowSpec):
1693
+
1694
+ @checkpoint
1695
+ @step
1696
+ def start(self):
1697
+ with open("my_file.txt", "w") as f:
1698
+ f.write("Hello, World!")
1699
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1700
+ self.next(self.end)
1701
+
1702
+ ```
1703
+
1704
+ - Using credentials to access the s3-compatible datastore.
1705
+
1706
+ ```python
1707
+ @with_artifact_store(
1708
+ type="s3",
1709
+ config=lambda: {
1710
+ "root": "s3://my-bucket-foo/path/to/root",
1711
+ "client_params": {
1712
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1713
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1714
+ },
1715
+ },
1716
+ )
1717
+ class MyFlow(FlowSpec):
1646
1718
 
1647
- A project-specific namespace is created for all flows that
1648
- use the same `@project(name)`.
1719
+ @checkpoint
1720
+ @step
1721
+ def start(self):
1722
+ with open("my_file.txt", "w") as f:
1723
+ f.write("Hello, World!")
1724
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1725
+ self.next(self.end)
1726
+
1727
+ ```
1649
1728
 
1729
+ - Accessing objects stored in external datastores after task execution.
1650
1730
 
1651
- Parameters
1731
+ ```python
1732
+ run = Run("CheckpointsTestsFlow/8992")
1733
+ with artifact_store_from(run=run, config={
1734
+ "client_params": {
1735
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1736
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1737
+ },
1738
+ }):
1739
+ with Checkpoint() as cp:
1740
+ latest = cp.list(
1741
+ task=run["start"].task
1742
+ )[0]
1743
+ print(latest)
1744
+ cp.load(
1745
+ latest,
1746
+ "test-checkpoints"
1747
+ )
1748
+
1749
+ task = Task("TorchTuneFlow/8484/train/53673")
1750
+ with artifact_store_from(run=run, config={
1751
+ "client_params": {
1752
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1753
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1754
+ },
1755
+ }):
1756
+ load_model(
1757
+ task.data.model_ref,
1758
+ "test-models"
1759
+ )
1760
+ ```
1761
+ Parameters
1652
1762
  ----------
1653
- name : str
1654
- Project name. Make sure that the name is unique amongst all
1655
- projects that use the same production scheduler. The name may
1656
- contain only lowercase alphanumeric characters and underscores.
1657
1763
 
1658
- branch : Optional[str], default None
1659
- The branch to use. If not specified, the branch is set to
1660
- `user.<username>` unless `production` is set to `True`. This can
1661
- also be set on the command line using `--branch` as a top-level option.
1662
- It is an error to specify `branch` in the decorator and on the command line.
1764
+ type: str
1765
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure', or any other datastore supported by Metaflow.
1663
1766
 
1664
- production : bool, default False
1665
- Whether or not the branch is the production branch. This can also be set on the
1666
- command line using `--production` as a top-level option. It is an error to specify
1667
- `production` in the decorator and on the command line.
1668
- The project branch name will be:
1669
- - if `branch` is specified:
1670
- - if `production` is True: `prod.<branch>`
1671
- - if `production` is False: `test.<branch>`
1672
- - if `branch` is not specified:
1673
- - if `production` is True: `prod`
1674
- - if `production` is False: `user.<username>`
1767
+ config: dict or Callable
1768
+ Dictionary of configuration options for the datastore. The following keys are recognized (only `root` is required):
1769
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1770
+ - example: 's3://bucket-name/path/to/root'
1771
+ - example: 'gs://bucket-name/path/to/root'
1772
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1773
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1774
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1775
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1675
1776
  """
1676
1777
  ...
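The docstring above only shows S3 configurations; the sketch below applies the same pattern to a GCS root, which per the parameter notes needs only the `root` key. The bucket path and flow name are illustrative, and it assumes the execution environment already holds credentials for that bucket and that the `checkpoint` decorator is exposed at the top level of the metaflow package, as it is in these stubs.

```python
from metaflow import FlowSpec, checkpoint, current, step, with_artifact_store


# Illustrative GCS root; for a non-S3 datastore only the 'root' key is set here.
@with_artifact_store(
    type="gcs",
    config=lambda: {"root": "gs://my-ml-artifacts/checkpoints"},
)
class GcsCheckpointFlow(FlowSpec):

    @checkpoint
    @step
    def start(self):
        with open("state.txt", "w") as f:
            f.write("epoch=1")
        # Saved under the GCS root configured above instead of the default Metaflow datastore.
        self.gcs_checkpoint = current.checkpoint.save("state.txt")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    GcsCheckpointFlow()
```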
1677
1778
 
@@ -1776,147 +1877,46 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1776
1877
  """
1777
1878
  ...
1778
1879
 
1779
- @typing.overload
1780
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1781
- """
1782
- Specifies the Conda environment for all steps of the flow.
1783
-
1784
- Use `@conda_base` to set common libraries required by all
1785
- steps and use `@conda` to specify step-specific additions.
1786
-
1787
-
1788
- Parameters
1789
- ----------
1790
- packages : Dict[str, str], default {}
1791
- Packages to use for this flow. The key is the name of the package
1792
- and the value is the version to use.
1793
- libraries : Dict[str, str], default {}
1794
- Supported for backward compatibility. When used with packages, packages will take precedence.
1795
- python : str, optional, default None
1796
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1797
- that the version used will correspond to the version of the Python interpreter used to start the run.
1798
- disabled : bool, default False
1799
- If set to True, disables Conda.
1800
- """
1801
- ...
1802
-
1803
- @typing.overload
1804
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1805
- ...
1806
-
1807
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1808
- """
1809
- Specifies the Conda environment for all steps of the flow.
1810
-
1811
- Use `@conda_base` to set common libraries required by all
1812
- steps and use `@conda` to specify step-specific additions.
1813
-
1814
-
1815
- Parameters
1816
- ----------
1817
- packages : Dict[str, str], default {}
1818
- Packages to use for this flow. The key is the name of the package
1819
- and the value is the version to use.
1820
- libraries : Dict[str, str], default {}
1821
- Supported for backward compatibility. When used with packages, packages will take precedence.
1822
- python : str, optional, default None
1823
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1824
- that the version used will correspond to the version of the Python interpreter used to start the run.
1825
- disabled : bool, default False
1826
- If set to True, disables Conda.
1827
- """
1828
- ...
1829
-
1830
- @typing.overload
1831
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1832
- """
1833
- Specifies the event(s) that this flow depends on.
1834
-
1835
- ```
1836
- @trigger(event='foo')
1837
- ```
1838
- or
1839
- ```
1840
- @trigger(events=['foo', 'bar'])
1841
- ```
1842
-
1843
- Additionally, you can specify the parameter mappings
1844
- to map event payload to Metaflow parameters for the flow.
1845
- ```
1846
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1847
- ```
1848
- or
1849
- ```
1850
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1851
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1852
- ```
1853
-
1854
- 'parameters' can also be a list of strings and tuples like so:
1855
- ```
1856
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1857
- ```
1858
- This is equivalent to:
1859
- ```
1860
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1861
- ```
1862
-
1863
-
1864
- Parameters
1865
- ----------
1866
- event : Union[str, Dict[str, Any]], optional, default None
1867
- Event dependency for this flow.
1868
- events : List[Union[str, Dict[str, Any]]], default []
1869
- Events dependency for this flow.
1870
- options : Dict[str, Any], default {}
1871
- Backend-specific configuration for tuning eventing behavior.
1872
- """
1873
- ...
1874
-
1875
- @typing.overload
1876
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1877
- ...
1878
-
1879
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1880
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1880
1881
  """
1881
- Specifies the event(s) that this flow depends on.
1882
-
1883
- ```
1884
- @trigger(event='foo')
1885
- ```
1886
- or
1887
- ```
1888
- @trigger(events=['foo', 'bar'])
1889
- ```
1890
-
1891
- Additionally, you can specify the parameter mappings
1892
- to map event payload to Metaflow parameters for the flow.
1893
- ```
1894
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1895
- ```
1896
- or
1897
- ```
1898
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1899
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1900
- ```
1901
-
1902
- 'parameters' can also be a list of strings and tuples like so:
1903
- ```
1904
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1905
- ```
1906
- This is equivalent to:
1907
- ```
1908
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1909
- ```
1882
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1883
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1884
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1885
+ added as a flow decorator. Adding more than one decorator will ensure that the `start` step
1886
+ starts only after all sensors finish.
1910
1887
 
1911
1888
 
1912
1889
  Parameters
1913
1890
  ----------
1914
- event : Union[str, Dict[str, Any]], optional, default None
1915
- Event dependency for this flow.
1916
- events : List[Union[str, Dict[str, Any]]], default []
1917
- Events dependency for this flow.
1918
- options : Dict[str, Any], default {}
1919
- Backend-specific configuration for tuning eventing behavior.
1891
+ timeout : int
1892
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1893
+ poke_interval : int
1894
+ Time, in seconds, that the job should wait between each try. (Default: 60)
1895
+ mode : str
1896
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1897
+ exponential_backoff : bool
1898
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1899
+ pool : str
1900
+ The slot pool this task should run in;
1901
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1902
+ soft_fail : bool
1903
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1904
+ name : str
1905
+ Name of the sensor on Airflow
1906
+ description : str
1907
+ Description of sensor in the Airflow UI
1908
+ bucket_key : Union[str, List[str]]
1909
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
1910
+ When specified as a full s3:// URL, please leave `bucket_name` as None.
1911
+ bucket_name : str
1912
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
1913
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1914
+ wildcard_match : bool
1915
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1916
+ aws_conn_id : str
1917
+ A reference to the S3 connection on Airflow. (Default: None)
1918
+ verify : bool
1919
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
1920
1920
  """
1921
1921
  ...
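Tying the sensor parameters above together, here is a minimal sketch of gating a flow on an S3 key when compiling for Airflow. The bucket key, Airflow connection id, pool, and flow name are illustrative; the sensor is attached only when the flow is compiled with `airflow create`.

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


# Compile with e.g. `python daily_ingest_flow.py airflow create daily_ingest_dag.py`;
# the generated DAG waits (in 'poke' mode) for the key below before running `start`.
@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool="default_pool",          # Airflow's built-in default pool
    soft_fail=False,
    name="wait_for_daily_dump",
    description="Wait for the upstream daily export to land in S3",
    bucket_key="s3://my-upstream-bucket/exports/daily.parquet",
    bucket_name=None,             # full s3:// URL given, so bucket_name stays None per the docs above
    wildcard_match=False,
    aws_conn_id="aws_default",    # illustrative Airflow connection id
    verify=None,                  # use the connection's default SSL verification
)
class DailyIngestFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DailyIngestFlow()
```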
1922
1922