ob-metaflow-stubs 6.0.10.18__py2.py3-none-any.whl → 6.0.10.20__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ob-metaflow-stubs might be problematic. See the advisory details on the package registry page for more information.

Files changed (266)
  1. metaflow-stubs/__init__.pyi +975 -974
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +3 -2
  7. metaflow-stubs/client/core.pyi +39 -7
  8. metaflow-stubs/client/filecache.pyi +20 -4
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +16 -2
  20. metaflow-stubs/metaflow_current.pyi +59 -59
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +5 -5
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +3 -3
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/cards/__init__.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/cards/hf_hub_card.pyi +3 -3
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +5 -5
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  64. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +3 -3
  65. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +3 -3
  66. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +4 -4
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +3 -3
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -5
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +3 -3
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  113. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +3 -3
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  116. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  117. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  118. metaflow-stubs/multicore_utils.pyi +2 -2
  119. metaflow-stubs/ob_internal.pyi +2 -2
  120. metaflow-stubs/packaging_sys/__init__.pyi +7 -7
  121. metaflow-stubs/packaging_sys/backend.pyi +2 -2
  122. metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
  123. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  124. metaflow-stubs/packaging_sys/utils.pyi +2 -5
  125. metaflow-stubs/packaging_sys/v1.pyi +4 -4
  126. metaflow-stubs/parameters.pyi +3 -3
  127. metaflow-stubs/plugins/__init__.pyi +13 -13
  128. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  134. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  135. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  137. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  138. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  139. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  140. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  141. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  142. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  143. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  144. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  145. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  146. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  148. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  149. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  150. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  154. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  157. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  158. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
  159. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  161. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  162. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  163. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  164. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  165. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  166. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  168. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_datastore.pyi +4 -2
  170. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  171. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  173. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
  175. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  176. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +3 -3
  177. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  178. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  179. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  180. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  181. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  182. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  184. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  186. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -4
  187. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  188. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  189. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  190. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  191. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  193. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  194. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  195. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  196. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  200. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  201. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  202. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  205. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  207. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  208. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  209. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  210. metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
  211. metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
  212. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  213. metaflow-stubs/plugins/parsers.pyi +2 -2
  214. metaflow-stubs/plugins/perimeters.pyi +2 -2
  215. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  218. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  219. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  220. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  221. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  222. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  223. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  224. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  227. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  228. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  229. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  230. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  231. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  233. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  234. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  235. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  236. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  237. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  238. metaflow-stubs/profilers/__init__.pyi +2 -2
  239. metaflow-stubs/pylint_wrapper.pyi +2 -2
  240. metaflow-stubs/runner/__init__.pyi +2 -2
  241. metaflow-stubs/runner/deployer.pyi +34 -34
  242. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  243. metaflow-stubs/runner/metaflow_runner.pyi +131 -18
  244. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  245. metaflow-stubs/runner/nbrun.pyi +2 -2
  246. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  247. metaflow-stubs/runner/utils.pyi +2 -2
  248. metaflow-stubs/system/__init__.pyi +2 -2
  249. metaflow-stubs/system/system_logger.pyi +2 -2
  250. metaflow-stubs/system/system_monitor.pyi +2 -2
  251. metaflow-stubs/tagging_util.pyi +2 -2
  252. metaflow-stubs/tuple_util.pyi +2 -2
  253. metaflow-stubs/user_configs/__init__.pyi +2 -2
  254. metaflow-stubs/user_configs/config_options.pyi +3 -3
  255. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  256. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  257. metaflow-stubs/user_decorators/common.pyi +2 -2
  258. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  259. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  260. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  261. metaflow-stubs/user_decorators/user_step_decorator.pyi +15 -6
  262. {ob_metaflow_stubs-6.0.10.18.dist-info → ob_metaflow_stubs-6.0.10.20.dist-info}/METADATA +1 -1
  263. ob_metaflow_stubs-6.0.10.20.dist-info/RECORD +266 -0
  264. ob_metaflow_stubs-6.0.10.18.dist-info/RECORD +0 -266
  265. {ob_metaflow_stubs-6.0.10.18.dist-info → ob_metaflow_stubs-6.0.10.20.dist-info}/WHEEL +0 -0
  266. {ob_metaflow_stubs-6.0.10.18.dist-info → ob_metaflow_stubs-6.0.10.20.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.18.12.1+obcheckpoint(0.2.8);ob(v1) #
4
- # Generated on 2025-10-20T19:13:33.388213 #
3
+ # MF version: 2.19.3.1+obcheckpoint(0.2.8);ob(v1) #
4
+ # Generated on 2025-10-28T11:23:52.528703 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
@@ -39,19 +39,19 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
39
39
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
40
40
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
41
41
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
42
- from . import cards as cards
43
42
  from . import metaflow_git as metaflow_git
44
- from . import tuple_util as tuple_util
43
+ from . import cards as cards
45
44
  from . import events as events
45
+ from . import tuple_util as tuple_util
46
46
  from . import runner as runner
47
47
  from . import plugins as plugins
48
48
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
49
49
  from . import includefile as includefile
50
50
  from .includefile import IncludeFile as IncludeFile
51
+ from .plugins.parsers import yaml_parser as yaml_parser
52
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
51
53
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
52
54
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
53
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
54
- from .plugins.parsers import yaml_parser as yaml_parser
55
55
  from . import client as client
56
56
  from .client.core import namespace as namespace
57
57
  from .client.core import get_namespace as get_namespace
@@ -59,6 +59,7 @@ from .client.core import default_namespace as default_namespace
59
59
  from .client.core import metadata as metadata
60
60
  from .client.core import get_metadata as get_metadata
61
61
  from .client.core import default_metadata as default_metadata
62
+ from .client.core import inspect_spin as inspect_spin
62
63
  from .client.core import Metaflow as Metaflow
63
64
  from .client.core import Flow as Flow
64
65
  from .client.core import Run as Run
@@ -85,8 +86,8 @@ from .mf_extensions.outerbounds.plugins.checkpoint_datastores.nebius import nebi
85
86
  from .mf_extensions.outerbounds.plugins.checkpoint_datastores.coreweave import coreweave_checkpoints as coreweave_checkpoints
86
87
  from .mf_extensions.outerbounds.plugins.aws.assume_role_decorator import assume_role as assume_role
87
88
  from .mf_extensions.outerbounds.plugins.apps.core.deployer import AppDeployer as AppDeployer
88
- from . import system as system
89
89
  from . import cli_components as cli_components
90
+ from . import system as system
90
91
  from . import pylint_wrapper as pylint_wrapper
91
92
  from . import cli as cli
92
93
  from . import profilers as profilers
@@ -169,70 +170,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
169
170
  """
170
171
  ...
171
172
 
172
- def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
173
- """
174
- `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
175
- It exists to make it easier for users to know that this decorator should only be used with
176
- a Neo Cloud like Nebius. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
177
-
178
-
179
- Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
180
- for S3 read and write requests.
181
-
182
- This decorator requires an integration in the Outerbounds platform that
183
- points to an external bucket. It affects S3 operations performed via
184
- Metaflow's `get_aws_client` and `S3` within a `@step`.
185
-
186
- Read operations
187
- ---------------
188
- All read operations pass through the proxy. If an object does not already
189
- exist in the external bucket, it is cached there. For example, if code reads
190
- from buckets `FOO` and `BAR` using the `S3` interface, objects from both
191
- buckets are cached in the external bucket.
192
-
193
- During task execution, all S3‑related read requests are routed through the
194
- proxy:
195
- - If the object is present in the external object store, the proxy
196
- streams it directly from there without accessing the requested origin
197
- bucket.
198
- - If the object is not present in the external storage, the proxy
199
- fetches it from the requested bucket, caches it in the external
200
- storage, and streams the response from the origin bucket.
201
-
202
- Warning
203
- -------
204
- All READ operations (e.g., GetObject, HeadObject) pass through the external
205
- bucket regardless of the bucket specified in user code. Even
206
- `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
207
- external bucket cache.
208
-
209
- Write operations
210
- ----------------
211
- Write behavior is controlled by the `write_mode` parameter, which determines
212
- whether writes also persist objects in the cache.
213
-
214
- `write_mode` values:
215
- - `origin-and-cache`: objects are written both to the cache and to their
216
- intended origin bucket.
217
- - `origin`: objects are written only to their intended origin bucket.
218
-
219
-
220
- Parameters
221
- ----------
222
- integration_name : str, optional
223
- [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
224
- that holds the configuration for the external, S3‑compatible object
225
- storage bucket. If not specified, the only available S3 proxy
226
- integration in the namespace is used (fails if multiple exist).
227
- write_mode : str, optional
228
- Controls whether writes also go to the external bucket.
229
- - `origin` (default)
230
- - `origin-and-cache`
231
- debug : bool, optional
232
- Enables debug logging for proxy operations.
233
- """
234
- ...
235
-
236
173
  @typing.overload
237
174
  def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
238
175
  """
@@ -292,296 +229,245 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
292
229
  """
293
230
  ...
294
231
 
295
- @typing.overload
296
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
232
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
297
233
  """
298
- Enables checkpointing for a step.
234
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
299
235
 
300
- > Examples
236
+ User code call
237
+ --------------
238
+ @vllm(
239
+ model="...",
240
+ ...
241
+ )
301
242
 
302
- - Saving Checkpoints
243
+ Valid backend options
244
+ ---------------------
245
+ - 'local': Run as a separate process on the local task machine.
303
246
 
304
- ```python
305
- @checkpoint
306
- @step
307
- def train(self):
308
- model = create_model(self.parameters, checkpoint_path = None)
309
- for i in range(self.epochs):
310
- # some training logic
311
- loss = model.train(self.dataset)
312
- if i % 10 == 0:
313
- model.save(
314
- current.checkpoint.directory,
315
- )
316
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
317
- # and returns a reference dictionary to the checkpoint saved in the datastore
318
- self.latest_checkpoint = current.checkpoint.save(
319
- name="epoch_checkpoint",
320
- metadata={
321
- "epoch": i,
322
- "loss": loss,
323
- }
324
- )
325
- ```
247
+ Valid model options
248
+ -------------------
249
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
326
250
 
327
- - Using Loaded Checkpoints
251
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
252
+ If you need multiple models, you must create multiple @vllm decorators.
328
253
 
329
- ```python
330
- @retry(times=3)
331
- @checkpoint
332
- @step
333
- def train(self):
334
- # Assume that the task has restarted and the previous attempt of the task
335
- # saved a checkpoint
336
- checkpoint_path = None
337
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
338
- print("Loaded checkpoint from the previous attempt")
339
- checkpoint_path = current.checkpoint.directory
340
254
 
341
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
342
- for i in range(self.epochs):
343
- ...
344
- ```
255
+ Parameters
256
+ ----------
257
+ model: str
258
+ HuggingFace model identifier to be served by vLLM.
259
+ backend: str
260
+ Determines where and how to run the vLLM process.
261
+ openai_api_server: bool
262
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
263
+ Default is False (uses native engine).
264
+ Set to True for backward compatibility with existing code.
265
+ debug: bool
266
+ Whether to turn on verbose debugging logs.
267
+ card_refresh_interval: int
268
+ Interval in seconds for refreshing the vLLM status card.
269
+ Only used when openai_api_server=True.
270
+ max_retries: int
271
+ Maximum number of retries checking for vLLM server startup.
272
+ Only used when openai_api_server=True.
273
+ retry_alert_frequency: int
274
+ Frequency of alert logs for vLLM server startup retries.
275
+ Only used when openai_api_server=True.
276
+ engine_args : dict
277
+ Additional keyword arguments to pass to the vLLM engine.
278
+ For example, `tensor_parallel_size=2`.
279
+ """
280
+ ...
281
+
282
+ @typing.overload
283
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
284
+ """
285
+ Specifies the PyPI packages for the step.
286
+
287
+ Information in this decorator will augment any
288
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
289
+ you can use `@pypi_base` to set packages required by all
290
+ steps and use `@pypi` to specify step-specific overrides.
345
291
 
346
292
 
347
293
  Parameters
348
294
  ----------
349
- load_policy : str, default: "fresh"
350
- The policy for loading the checkpoint. The following policies are supported:
351
- - "eager": Loads the the latest available checkpoint within the namespace.
352
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
353
- will be loaded at the start of the task.
354
- - "none": Do not load any checkpoint
355
- - "fresh": Loads the lastest checkpoint created within the running Task.
356
- This mode helps loading checkpoints across various retry attempts of the same task.
357
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
358
- created within the task will be loaded when the task is retries execution on failure.
359
-
360
- temp_dir_root : str, default: None
361
- The root directory under which `current.checkpoint.directory` will be created.
295
+ packages : Dict[str, str], default: {}
296
+ Packages to use for this step. The key is the name of the package
297
+ and the value is the version to use.
298
+ python : str, optional, default: None
299
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
300
+ that the version used will correspond to the version of the Python interpreter used to start the run.
362
301
  """
363
302
  ...
364
303
 
365
304
  @typing.overload
366
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
305
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
367
306
  ...
368
307
 
369
308
  @typing.overload
370
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
309
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
371
310
  ...
372
311
 
373
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
312
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
374
313
  """
375
- Enables checkpointing for a step.
376
-
377
- > Examples
378
-
379
- - Saving Checkpoints
380
-
381
- ```python
382
- @checkpoint
383
- @step
384
- def train(self):
385
- model = create_model(self.parameters, checkpoint_path = None)
386
- for i in range(self.epochs):
387
- # some training logic
388
- loss = model.train(self.dataset)
389
- if i % 10 == 0:
390
- model.save(
391
- current.checkpoint.directory,
392
- )
393
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
394
- # and returns a reference dictionary to the checkpoint saved in the datastore
395
- self.latest_checkpoint = current.checkpoint.save(
396
- name="epoch_checkpoint",
397
- metadata={
398
- "epoch": i,
399
- "loss": loss,
400
- }
401
- )
402
- ```
403
-
404
- - Using Loaded Checkpoints
405
-
406
- ```python
407
- @retry(times=3)
408
- @checkpoint
409
- @step
410
- def train(self):
411
- # Assume that the task has restarted and the previous attempt of the task
412
- # saved a checkpoint
413
- checkpoint_path = None
414
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
415
- print("Loaded checkpoint from the previous attempt")
416
- checkpoint_path = current.checkpoint.directory
314
+ Specifies the PyPI packages for the step.
417
315
 
418
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
419
- for i in range(self.epochs):
420
- ...
421
- ```
316
+ Information in this decorator will augment any
317
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
318
+ you can use `@pypi_base` to set packages required by all
319
+ steps and use `@pypi` to specify step-specific overrides.
422
320
 
423
321
 
424
322
  Parameters
425
323
  ----------
426
- load_policy : str, default: "fresh"
427
- The policy for loading the checkpoint. The following policies are supported:
428
- - "eager": Loads the the latest available checkpoint within the namespace.
429
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
430
- will be loaded at the start of the task.
431
- - "none": Do not load any checkpoint
432
- - "fresh": Loads the lastest checkpoint created within the running Task.
433
- This mode helps loading checkpoints across various retry attempts of the same task.
434
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
435
- created within the task will be loaded when the task is retries execution on failure.
436
-
437
- temp_dir_root : str, default: None
438
- The root directory under which `current.checkpoint.directory` will be created.
324
+ packages : Dict[str, str], default: {}
325
+ Packages to use for this step. The key is the name of the package
326
+ and the value is the version to use.
327
+ python : str, optional, default: None
328
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
329
+ that the version used will correspond to the version of the Python interpreter used to start the run.
439
330
  """
440
331
  ...
441
332
 
442
333
  @typing.overload
443
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
334
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
444
335
  """
445
- Creates a human-readable report, a Metaflow Card, after this step completes.
446
-
447
- Note that you may add multiple `@card` decorators in a step with different parameters.
448
-
449
-
450
- Parameters
451
- ----------
452
- type : str, default 'default'
453
- Card type.
454
- id : str, optional, default None
455
- If multiple cards are present, use this id to identify this card.
456
- options : Dict[str, Any], default {}
457
- Options passed to the card. The contents depend on the card type.
458
- timeout : int, default 45
459
- Interrupt reporting if it takes more than this many seconds.
336
+ Decorator prototype for all step decorators. This function gets specialized
337
+ and imported for all decorators types by _import_plugin_decorators().
460
338
  """
461
339
  ...
462
340
 
463
341
  @typing.overload
464
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
342
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
465
343
  ...
466
344
 
467
- @typing.overload
468
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
345
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
346
+ """
347
+ Decorator prototype for all step decorators. This function gets specialized
348
+ and imported for all decorators types by _import_plugin_decorators().
349
+ """
469
350
  ...
470
351
 
471
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
352
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
472
353
  """
473
- Creates a human-readable report, a Metaflow Card, after this step completes.
354
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
474
355
 
475
- Note that you may add multiple `@card` decorators in a step with different parameters.
356
+ User code call
357
+ --------------
358
+ @ollama(
359
+ models=[...],
360
+ ...
361
+ )
362
+
363
+ Valid backend options
364
+ ---------------------
365
+ - 'local': Run as a separate process on the local task machine.
366
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
367
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
368
+
369
+ Valid model options
370
+ -------------------
371
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
476
372
 
477
373
 
478
374
  Parameters
479
375
  ----------
480
- type : str, default 'default'
481
- Card type.
482
- id : str, optional, default None
483
- If multiple cards are present, use this id to identify this card.
484
- options : Dict[str, Any], default {}
485
- Options passed to the card. The contents depend on the card type.
486
- timeout : int, default 45
487
- Interrupt reporting if it takes more than this many seconds.
488
- """
489
- ...
490
-
491
- @typing.overload
492
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
493
- """
494
- Internal decorator to support Fast bakery
495
- """
496
- ...
497
-
498
- @typing.overload
499
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
500
- ...
501
-
502
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
503
- """
504
- Internal decorator to support Fast bakery
376
+ models: list[str]
377
+ List of Ollama containers running models in sidecars.
378
+ backend: str
379
+ Determines where and how to run the Ollama process.
380
+ force_pull: bool
381
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
382
+ cache_update_policy: str
383
+ Cache update policy: "auto", "force", or "never".
384
+ force_cache_update: bool
385
+ Simple override for "force" cache update policy.
386
+ debug: bool
387
+ Whether to turn on verbose debugging logs.
388
+ circuit_breaker_config: dict
389
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
390
+ timeout_config: dict
391
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
505
392
  """
506
393
  ...
507
394
 
508
395
  @typing.overload
509
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
396
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
510
397
  """
511
- Decorator prototype for all step decorators. This function gets specialized
512
- and imported for all decorators types by _import_plugin_decorators().
398
+ A simple decorator that demonstrates using CardDecoratorInjector
399
+ to inject a card and render simple markdown content.
513
400
  """
514
401
  ...
515
402
 
516
403
  @typing.overload
517
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
404
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
518
405
  ...
519
406
 
520
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
407
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
521
408
  """
522
- Decorator prototype for all step decorators. This function gets specialized
523
- and imported for all decorators types by _import_plugin_decorators().
409
+ A simple decorator that demonstrates using CardDecoratorInjector
410
+ to inject a card and render simple markdown content.
524
411
  """
525
412
  ...
526
413
 
527
414
  @typing.overload
528
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
415
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
529
416
  """
530
- Specifies the PyPI packages for the step.
417
+ Specifies the number of times the task corresponding
418
+ to a step needs to be retried.
531
419
 
532
- Information in this decorator will augment any
533
- attributes set in the `@pyi_base` flow-level decorator. Hence,
534
- you can use `@pypi_base` to set packages required by all
535
- steps and use `@pypi` to specify step-specific overrides.
420
+ This decorator is useful for handling transient errors, such as networking issues.
421
+ If your task contains operations that can't be retried safely, e.g. database updates,
422
+ it is advisable to annotate it with `@retry(times=0)`.
423
+
424
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
425
+ decorator will execute a no-op task after all retries have been exhausted,
426
+ ensuring that the flow execution can continue.
536
427
 
537
428
 
538
429
  Parameters
539
430
  ----------
540
- packages : Dict[str, str], default: {}
541
- Packages to use for this step. The key is the name of the package
542
- and the value is the version to use.
543
- python : str, optional, default: None
544
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
545
- that the version used will correspond to the version of the Python interpreter used to start the run.
431
+ times : int, default 3
432
+ Number of times to retry this task.
433
+ minutes_between_retries : int, default 2
434
+ Number of minutes between retries.
546
435
  """
547
436
  ...
548
437
 
549
438
  @typing.overload
550
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
439
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
551
440
  ...
552
441
 
553
442
  @typing.overload
554
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
443
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
555
444
  ...
556
445
 
557
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
446
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
558
447
  """
559
- Specifies the PyPI packages for the step.
448
+ Specifies the number of times the task corresponding
449
+ to a step needs to be retried.
560
450
 
561
- Information in this decorator will augment any
562
- attributes set in the `@pyi_base` flow-level decorator. Hence,
563
- you can use `@pypi_base` to set packages required by all
564
- steps and use `@pypi` to specify step-specific overrides.
451
+ This decorator is useful for handling transient errors, such as networking issues.
452
+ If your task contains operations that can't be retried safely, e.g. database updates,
453
+ it is advisable to annotate it with `@retry(times=0)`.
454
+
455
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
456
+ decorator will execute a no-op task after all retries have been exhausted,
457
+ ensuring that the flow execution can continue.
565
458
 
566
459
 
567
460
  Parameters
568
461
  ----------
569
- packages : Dict[str, str], default: {}
570
- Packages to use for this step. The key is the name of the package
571
- and the value is the version to use.
572
- python : str, optional, default: None
573
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
574
- that the version used will correspond to the version of the Python interpreter used to start the run.
462
+ times : int, default 3
463
+ Number of times to retry this task.
464
+ minutes_between_retries : int, default 2
465
+ Number of minutes between retries.
575
466
  """
576
467
  ...
577
468
 
578
- def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
469
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
579
470
  """
580
- `@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
581
- It exists to make it easier for users to know that this decorator should only be used with
582
- a Neo Cloud like CoreWeave. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
583
-
584
-
585
471
  Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
586
472
  for S3 read and write requests.
587
473
 
@@ -640,21 +526,78 @@ def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_m
640
526
  ...
641
527
 
642
528
  @typing.overload
643
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
529
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
644
530
  """
645
- A simple decorator that demonstrates using CardDecoratorInjector
646
- to inject a card and render simple markdown content.
531
+ Internal decorator to support Fast bakery
647
532
  """
648
533
  ...
649
534
 
650
535
  @typing.overload
651
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
536
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
652
537
  ...
653
538
 
654
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
539
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
655
540
  """
656
- A simple decorator that demonstrates using CardDecoratorInjector
657
- to inject a card and render simple markdown content.
541
+ Internal decorator to support Fast bakery
542
+ """
543
+ ...
544
+
545
+ @typing.overload
546
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
547
+ """
548
+ Specifies a timeout for your step.
549
+
550
+ This decorator is useful if this step may hang indefinitely.
551
+
552
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
553
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
554
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
555
+
556
+ Note that all the values specified in parameters are added together so if you specify
557
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
558
+
559
+
560
+ Parameters
561
+ ----------
562
+ seconds : int, default 0
563
+ Number of seconds to wait prior to timing out.
564
+ minutes : int, default 0
565
+ Number of minutes to wait prior to timing out.
566
+ hours : int, default 0
567
+ Number of hours to wait prior to timing out.
568
+ """
569
+ ...
570
+
571
+ @typing.overload
572
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
573
+ ...
574
+
575
+ @typing.overload
576
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
577
+ ...
578
+
579
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
580
+ """
581
+ Specifies a timeout for your step.
582
+
583
+ This decorator is useful if this step may hang indefinitely.
584
+
585
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
586
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
587
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
588
+
589
+ Note that all the values specified in parameters are added together so if you specify
590
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
591
+
592
+
593
+ Parameters
594
+ ----------
595
+ seconds : int, default 0
596
+ Number of seconds to wait prior to timing out.
597
+ minutes : int, default 0
598
+ Number of minutes to wait prior to timing out.
599
+ hours : int, default 0
600
+ Number of hours to wait prior to timing out.
658
601
  """
659
602
  ...
660
603
 
@@ -710,57 +653,41 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
710
653
  ...
711
654
 
712
655
  @typing.overload
713
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
656
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
714
657
  """
715
- Specifies the number of times the task corresponding
716
- to a step needs to be retried.
717
-
718
- This decorator is useful for handling transient errors, such as networking issues.
719
- If your task contains operations that can't be retried safely, e.g. database updates,
720
- it is advisable to annotate it with `@retry(times=0)`.
721
-
722
- This can be used in conjunction with the `@catch` decorator. The `@catch`
723
- decorator will execute a no-op task after all retries have been exhausted,
724
- ensuring that the flow execution can continue.
658
+ Specifies secrets to be retrieved and injected as environment variables prior to
659
+ the execution of a step.
725
660
 
726
661
 
727
662
  Parameters
728
663
  ----------
729
- times : int, default 3
730
- Number of times to retry this task.
731
- minutes_between_retries : int, default 2
732
- Number of minutes between retries.
664
+ sources : List[Union[str, Dict[str, Any]]], default: []
665
+ List of secret specs, defining how the secrets are to be retrieved
666
+ role : str, optional, default: None
667
+ Role to use for fetching secrets
733
668
  """
734
669
  ...
735
670
 
736
671
  @typing.overload
737
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
672
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
738
673
  ...
739
674
 
740
675
  @typing.overload
741
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
676
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
742
677
  ...
743
678
 
744
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
679
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
745
680
  """
746
- Specifies the number of times the task corresponding
747
- to a step needs to be retried.
748
-
749
- This decorator is useful for handling transient errors, such as networking issues.
750
- If your task contains operations that can't be retried safely, e.g. database updates,
751
- it is advisable to annotate it with `@retry(times=0)`.
752
-
753
- This can be used in conjunction with the `@catch` decorator. The `@catch`
754
- decorator will execute a no-op task after all retries have been exhausted,
755
- ensuring that the flow execution can continue.
681
+ Specifies secrets to be retrieved and injected as environment variables prior to
682
+ the execution of a step.
756
683
 
757
684
 
758
685
  Parameters
759
686
  ----------
760
- times : int, default 3
761
- Number of times to retry this task.
762
- minutes_between_retries : int, default 2
763
- Number of minutes between retries.
687
+ sources : List[Union[str, Dict[str, Any]]], default: []
688
+ List of secret specs, defining how the secrets are to be retrieved
689
+ role : str, optional, default: None
690
+ Role to use for fetching secrets
764
691
  """
765
692
  ...
766
693
 
@@ -893,6 +820,221 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
893
820
  """
894
821
  ...
895
822
 
823
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
824
+ """
825
+ Specifies that this step should execute on DGX cloud.
826
+
827
+
828
+ Parameters
829
+ ----------
830
+ gpu : int
831
+ Number of GPUs to use.
832
+ gpu_type : str
833
+ Type of Nvidia GPU to use.
834
+ queue_timeout : int
835
+ Time to keep the job in NVCF's queue.
836
+ """
837
+ ...
838
+
839
+ @typing.overload
840
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
841
+ """
842
+ Specifies environment variables to be set prior to the execution of a step.
843
+
844
+
845
+ Parameters
846
+ ----------
847
+ vars : Dict[str, str], default {}
848
+ Dictionary of environment variables to set.
849
+ """
850
+ ...
851
+
852
+ @typing.overload
853
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
854
+ ...
855
+
856
+ @typing.overload
857
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
858
+ ...
859
+
860
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
861
+ """
862
+ Specifies environment variables to be set prior to the execution of a step.
863
+
864
+
865
+ Parameters
866
+ ----------
867
+ vars : Dict[str, str], default {}
868
+ Dictionary of environment variables to set.
869
+ """
870
+ ...
871
+
872
+ @typing.overload
873
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
874
+ """
875
+ Decorator prototype for all step decorators. This function gets specialized
876
+ and imported for all decorators types by _import_plugin_decorators().
877
+ """
878
+ ...
879
+
880
+ @typing.overload
881
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
882
+ ...
883
+
884
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
885
+ """
886
+ Decorator prototype for all step decorators. This function gets specialized
887
+ and imported for all decorators types by _import_plugin_decorators().
888
+ """
889
+ ...
890
+
891
+ @typing.overload
892
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
893
+ """
894
+ Enables checkpointing for a step.
895
+
896
+ > Examples
897
+
898
+ - Saving Checkpoints
899
+
900
+ ```python
901
+ @checkpoint
902
+ @step
903
+ def train(self):
904
+ model = create_model(self.parameters, checkpoint_path = None)
905
+ for i in range(self.epochs):
906
+ # some training logic
907
+ loss = model.train(self.dataset)
908
+ if i % 10 == 0:
909
+ model.save(
910
+ current.checkpoint.directory,
911
+ )
912
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
913
+ # and returns a reference dictionary to the checkpoint saved in the datastore
914
+ self.latest_checkpoint = current.checkpoint.save(
915
+ name="epoch_checkpoint",
916
+ metadata={
917
+ "epoch": i,
918
+ "loss": loss,
919
+ }
920
+ )
921
+ ```
922
+
923
+ - Using Loaded Checkpoints
924
+
925
+ ```python
926
+ @retry(times=3)
927
+ @checkpoint
928
+ @step
929
+ def train(self):
930
+ # Assume that the task has restarted and the previous attempt of the task
931
+ # saved a checkpoint
932
+ checkpoint_path = None
933
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
934
+ print("Loaded checkpoint from the previous attempt")
935
+ checkpoint_path = current.checkpoint.directory
936
+
937
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
938
+ for i in range(self.epochs):
939
+ ...
940
+ ```
941
+
942
+
943
+ Parameters
944
+ ----------
945
+ load_policy : str, default: "fresh"
946
+ The policy for loading the checkpoint. The following policies are supported:
947
+ - "eager": Loads the the latest available checkpoint within the namespace.
948
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
949
+ will be loaded at the start of the task.
950
+ - "none": Do not load any checkpoint
951
+ - "fresh": Loads the lastest checkpoint created within the running Task.
952
+ This mode helps loading checkpoints across various retry attempts of the same task.
953
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
954
+ created within the task will be loaded when the task is retries execution on failure.
955
+
956
+ temp_dir_root : str, default: None
957
+ The root directory under which `current.checkpoint.directory` will be created.
958
+ """
959
+ ...
960
+
961
+ @typing.overload
962
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
963
+ ...
964
+
965
+ @typing.overload
966
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
967
+ ...
968
+
969
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
970
+ """
971
+ Enables checkpointing for a step.
972
+
973
+ > Examples
974
+
975
+ - Saving Checkpoints
976
+
977
+ ```python
978
+ @checkpoint
979
+ @step
980
+ def train(self):
981
+ model = create_model(self.parameters, checkpoint_path = None)
982
+ for i in range(self.epochs):
983
+ # some training logic
984
+ loss = model.train(self.dataset)
985
+ if i % 10 == 0:
986
+ model.save(
987
+ current.checkpoint.directory,
988
+ )
989
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
990
+ # and returns a reference dictionary to the checkpoint saved in the datastore
991
+ self.latest_checkpoint = current.checkpoint.save(
992
+ name="epoch_checkpoint",
993
+ metadata={
994
+ "epoch": i,
995
+ "loss": loss,
996
+ }
997
+ )
998
+ ```
999
+
1000
+ - Using Loaded Checkpoints
1001
+
1002
+ ```python
1003
+ @retry(times=3)
1004
+ @checkpoint
1005
+ @step
1006
+ def train(self):
1007
+ # Assume that the task has restarted and the previous attempt of the task
1008
+ # saved a checkpoint
1009
+ checkpoint_path = None
1010
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1011
+ print("Loaded checkpoint from the previous attempt")
1012
+ checkpoint_path = current.checkpoint.directory
1013
+
1014
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1015
+ for i in range(self.epochs):
1016
+ ...
1017
+ ```
1018
+
1019
+
1020
+ Parameters
1021
+ ----------
1022
+ load_policy : str, default: "fresh"
1023
+ The policy for loading the checkpoint. The following policies are supported:
1024
+ - "eager": Loads the the latest available checkpoint within the namespace.
1025
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1026
+ will be loaded at the start of the task.
1027
+ - "none": Do not load any checkpoint
1028
+ - "fresh": Loads the lastest checkpoint created within the running Task.
1029
+ This mode helps loading checkpoints across various retry attempts of the same task.
1030
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1031
+ created within the task will be loaded when the task is retries execution on failure.
1032
+
1033
+ temp_dir_root : str, default: None
1034
+ The root directory under which `current.checkpoint.directory` will be created.
1035
+ """
1036
+ ...
1037
+
896
1038
  def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
897
1039
  """
898
1040
  Specifies that this step should execute on Kubernetes.
@@ -982,418 +1124,7 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
982
1124
  """
983
1125
  ...
984
1126
 
985
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
986
- """
987
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
988
-
989
- User code call
990
- --------------
991
- @ollama(
992
- models=[...],
993
- ...
994
- )
995
-
996
- Valid backend options
997
- ---------------------
998
- - 'local': Run as a separate process on the local task machine.
999
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1000
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1001
-
1002
- Valid model options
1003
- -------------------
1004
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1005
-
1006
-
1007
- Parameters
1008
- ----------
1009
- models: list[str]
1010
- List of Ollama containers running models in sidecars.
1011
- backend: str
1012
- Determines where and how to run the Ollama process.
1013
- force_pull: bool
1014
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1015
- cache_update_policy: str
1016
- Cache update policy: "auto", "force", or "never".
1017
- force_cache_update: bool
1018
- Simple override for "force" cache update policy.
1019
- debug: bool
1020
- Whether to turn on verbose debugging logs.
1021
- circuit_breaker_config: dict
1022
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1023
- timeout_config: dict
1024
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1025
- """
1026
- ...
1027
-
1028
- @typing.overload
1029
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1030
- """
1031
- Decorator prototype for all step decorators. This function gets specialized
1032
- and imported for all decorators types by _import_plugin_decorators().
1033
- """
1034
- ...
1035
-
1036
- @typing.overload
1037
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1038
- ...
1039
-
1040
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1041
- """
1042
- Decorator prototype for all step decorators. This function gets specialized
1043
- and imported for all decorators types by _import_plugin_decorators().
1044
- """
1045
- ...
1046
-
1047
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1048
- """
1049
- Specifies that this step should execute on DGX cloud.
1050
-
1051
-
1052
- Parameters
1053
- ----------
1054
- gpu : int
1055
- Number of GPUs to use.
1056
- gpu_type : str
1057
- Type of Nvidia GPU to use.
1058
- queue_timeout : int
1059
- Time to keep the job in NVCF's queue.
1060
- """
1061
- ...
1062
-
1063
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1064
- """
1065
- Specifies that this step should execute on DGX cloud.
1066
-
1067
-
1068
- Parameters
1069
- ----------
1070
- gpu : int
1071
- Number of GPUs to use.
1072
- gpu_type : str
1073
- Type of Nvidia GPU to use.
1074
- """
1075
- ...
1076
-
1077
- @typing.overload
1078
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1079
- """
1080
- Specifies environment variables to be set prior to the execution of a step.
1081
-
1082
-
1083
- Parameters
1084
- ----------
1085
- vars : Dict[str, str], default {}
1086
- Dictionary of environment variables to set.
1087
- """
1088
- ...
1089
-
1090
- @typing.overload
1091
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1092
- ...
1093
-
1094
- @typing.overload
1095
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1096
- ...
1097
-
1098
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1099
- """
1100
- Specifies environment variables to be set prior to the execution of a step.
1101
-
1102
-
1103
- Parameters
1104
- ----------
1105
- vars : Dict[str, str], default {}
1106
- Dictionary of environment variables to set.
1107
- """
1108
- ...
1109
-
1110
- @typing.overload
1111
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1112
- """
1113
- Specifies a timeout for your step.
1114
-
1115
- This decorator is useful if this step may hang indefinitely.
1116
-
1117
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1118
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1119
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1120
-
1121
- Note that all the values specified in parameters are added together so if you specify
1122
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1123
-
1124
-
1125
- Parameters
1126
- ----------
1127
- seconds : int, default 0
1128
- Number of seconds to wait prior to timing out.
1129
- minutes : int, default 0
1130
- Number of minutes to wait prior to timing out.
1131
- hours : int, default 0
1132
- Number of hours to wait prior to timing out.
1133
- """
1134
- ...
1135
-
1136
- @typing.overload
1137
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1138
- ...
1139
-
1140
- @typing.overload
1141
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1142
- ...
1143
-
1144
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1145
- """
1146
- Specifies a timeout for your step.
1147
-
1148
- This decorator is useful if this step may hang indefinitely.
1149
-
1150
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1151
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1152
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1153
-
1154
- Note that all the values specified in parameters are added together so if you specify
1155
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1156
-
1157
-
1158
- Parameters
1159
- ----------
1160
- seconds : int, default 0
1161
- Number of seconds to wait prior to timing out.
1162
- minutes : int, default 0
1163
- Number of minutes to wait prior to timing out.
1164
- hours : int, default 0
1165
- Number of hours to wait prior to timing out.
1166
- """
1167
- ...
1168
-
1169
- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1170
- """
1171
- Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
1172
- for S3 read and write requests.
1173
-
1174
- This decorator requires an integration in the Outerbounds platform that
1175
- points to an external bucket. It affects S3 operations performed via
1176
- Metaflow's `get_aws_client` and `S3` within a `@step`.
1177
-
1178
- Read operations
1179
- ---------------
1180
- All read operations pass through the proxy. If an object does not already
1181
- exist in the external bucket, it is cached there. For example, if code reads
1182
- from buckets `FOO` and `BAR` using the `S3` interface, objects from both
1183
- buckets are cached in the external bucket.
1184
-
1185
- During task execution, all S3‑related read requests are routed through the
1186
- proxy:
1187
- - If the object is present in the external object store, the proxy
1188
- streams it directly from there without accessing the requested origin
1189
- bucket.
1190
- - If the object is not present in the external storage, the proxy
1191
- fetches it from the requested bucket, caches it in the external
1192
- storage, and streams the response from the origin bucket.
1193
-
1194
- Warning
1195
- -------
1196
- All READ operations (e.g., GetObject, HeadObject) pass through the external
1197
- bucket regardless of the bucket specified in user code. Even
1198
- `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
1199
- external bucket cache.
1200
-
1201
- Write operations
1202
- ----------------
1203
- Write behavior is controlled by the `write_mode` parameter, which determines
1204
- whether writes also persist objects in the cache.
1205
-
1206
- `write_mode` values:
1207
- - `origin-and-cache`: objects are written both to the cache and to their
1208
- intended origin bucket.
1209
- - `origin`: objects are written only to their intended origin bucket.
1210
-
1211
-
1212
- Parameters
1213
- ----------
1214
- integration_name : str, optional
1215
- [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
1216
- that holds the configuration for the external, S3‑compatible object
1217
- storage bucket. If not specified, the only available S3 proxy
1218
- integration in the namespace is used (fails if multiple exist).
1219
- write_mode : str, optional
1220
- Controls whether writes also go to the external bucket.
1221
- - `origin` (default)
1222
- - `origin-and-cache`
1223
- debug : bool, optional
1224
- Enables debug logging for proxy operations.
1225
- """
1226
- ...
1227
-
1228
- @typing.overload
1229
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1230
- """
1231
- Specifies secrets to be retrieved and injected as environment variables prior to
1232
- the execution of a step.
1233
-
1234
-
1235
- Parameters
1236
- ----------
1237
- sources : List[Union[str, Dict[str, Any]]], default: []
1238
- List of secret specs, defining how the secrets are to be retrieved
1239
- role : str, optional, default: None
1240
- Role to use for fetching secrets
1241
- """
1242
- ...
1243
-
1244
- @typing.overload
1245
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1246
- ...
1247
-
1248
- @typing.overload
1249
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1250
- ...
1251
-
1252
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1253
- """
1254
- Specifies secrets to be retrieved and injected as environment variables prior to
1255
- the execution of a step.
1256
-
1257
-
1258
- Parameters
1259
- ----------
1260
- sources : List[Union[str, Dict[str, Any]]], default: []
1261
- List of secret specs, defining how the secrets are to be retrieved
1262
- role : str, optional, default: None
1263
- Role to use for fetching secrets
1264
- """
1265
- ...
1266
-
1267
- @typing.overload
1268
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1269
- """
1270
- Specifies the resources needed when executing this step.
1271
-
1272
- Use `@resources` to specify the resource requirements
1273
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1274
-
1275
- You can choose the compute layer on the command line by executing e.g.
1276
- ```
1277
- python myflow.py run --with batch
1278
- ```
1279
- or
1280
- ```
1281
- python myflow.py run --with kubernetes
1282
- ```
1283
- which executes the flow on the desired system using the
1284
- requirements specified in `@resources`.
1285
-
1286
-
1287
- Parameters
1288
- ----------
1289
- cpu : int, default 1
1290
- Number of CPUs required for this step.
1291
- gpu : int, optional, default None
1292
- Number of GPUs required for this step.
1293
- disk : int, optional, default None
1294
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1295
- memory : int, default 4096
1296
- Memory size (in MB) required for this step.
1297
- shared_memory : int, optional, default None
1298
- The value for the size (in MiB) of the /dev/shm volume for this step.
1299
- This parameter maps to the `--shm-size` option in Docker.
1300
- """
1301
- ...
1302
-
1303
- @typing.overload
1304
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1305
- ...
1306
-
1307
- @typing.overload
1308
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1309
- ...
1310
-
1311
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1312
- """
1313
- Specifies the resources needed when executing this step.
1314
-
1315
- Use `@resources` to specify the resource requirements
1316
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1317
-
1318
- You can choose the compute layer on the command line by executing e.g.
1319
- ```
1320
- python myflow.py run --with batch
1321
- ```
1322
- or
1323
- ```
1324
- python myflow.py run --with kubernetes
1325
- ```
1326
- which executes the flow on the desired system using the
1327
- requirements specified in `@resources`.
1328
-
1329
-
1330
- Parameters
1331
- ----------
1332
- cpu : int, default 1
1333
- Number of CPUs required for this step.
1334
- gpu : int, optional, default None
1335
- Number of GPUs required for this step.
1336
- disk : int, optional, default None
1337
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1338
- memory : int, default 4096
1339
- Memory size (in MB) required for this step.
1340
- shared_memory : int, optional, default None
1341
- The value for the size (in MiB) of the /dev/shm volume for this step.
1342
- This parameter maps to the `--shm-size` option in Docker.
1343
- """
1344
- ...
1345
-
1346
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1347
- """
1348
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1349
-
1350
- User code call
1351
- --------------
1352
- @vllm(
1353
- model="...",
1354
- ...
1355
- )
1356
-
1357
- Valid backend options
1358
- ---------------------
1359
- - 'local': Run as a separate process on the local task machine.
1360
-
1361
- Valid model options
1362
- -------------------
1363
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1364
-
1365
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1366
- If you need multiple models, you must create multiple @vllm decorators.
1367
-
1368
-
1369
- Parameters
1370
- ----------
1371
- model: str
1372
- HuggingFace model identifier to be served by vLLM.
1373
- backend: str
1374
- Determines where and how to run the vLLM process.
1375
- openai_api_server: bool
1376
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1377
- Default is False (uses native engine).
1378
- Set to True for backward compatibility with existing code.
1379
- debug: bool
1380
- Whether to turn on verbose debugging logs.
1381
- card_refresh_interval: int
1382
- Interval in seconds for refreshing the vLLM status card.
1383
- Only used when openai_api_server=True.
1384
- max_retries: int
1385
- Maximum number of retries checking for vLLM server startup.
1386
- Only used when openai_api_server=True.
1387
- retry_alert_frequency: int
1388
- Frequency of alert logs for vLLM server startup retries.
1389
- Only used when openai_api_server=True.
1390
- engine_args : dict
1391
- Additional keyword arguments to pass to the vLLM engine.
1392
- For example, `tensor_parallel_size=2`.
1393
- """
1394
- ...
1395
-
1396
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1127
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1397
1128
  """
1398
1129
  Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
1399
1130
 
@@ -1511,180 +1242,323 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope:
1511
1242
  ...
1512
1243
 
1513
1244
  @typing.overload
1514
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1245
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1515
1246
  """
1516
- Specifies the times when the flow should be run when running on a
1517
- production scheduler.
1247
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1248
+
1249
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1518
1250
 
1519
1251
 
1520
1252
  Parameters
1521
1253
  ----------
1522
- hourly : bool, default False
1523
- Run the workflow hourly.
1524
- daily : bool, default True
1525
- Run the workflow daily.
1526
- weekly : bool, default False
1527
- Run the workflow weekly.
1528
- cron : str, optional, default None
1529
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1530
- specified by this expression.
1531
- timezone : str, optional, default None
1532
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1533
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1254
+ type : str, default 'default'
1255
+ Card type.
1256
+ id : str, optional, default None
1257
+ If multiple cards are present, use this id to identify this card.
1258
+ options : Dict[str, Any], default {}
1259
+ Options passed to the card. The contents depend on the card type.
1260
+ timeout : int, default 45
1261
+ Interrupt reporting if it takes more than this many seconds.
1534
1262
  """
1535
1263
  ...
1536
1264
 
1537
1265
  @typing.overload
1538
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1266
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1539
1267
  ...
1540
1268
 
1541
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1269
+ @typing.overload
1270
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1271
+ ...
1272
+
1273
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1542
1274
  """
1543
- Specifies the times when the flow should be run when running on a
1544
- production scheduler.
1275
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1276
+
1277
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1545
1278
 
1546
1279
 
1547
1280
  Parameters
1548
1281
  ----------
1549
- hourly : bool, default False
1550
- Run the workflow hourly.
1551
- daily : bool, default True
1552
- Run the workflow daily.
1553
- weekly : bool, default False
1554
- Run the workflow weekly.
1555
- cron : str, optional, default None
1556
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1557
- specified by this expression.
1558
- timezone : str, optional, default None
1559
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1560
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1282
+ type : str, default 'default'
1283
+ Card type.
1284
+ id : str, optional, default None
1285
+ If multiple cards are present, use this id to identify this card.
1286
+ options : Dict[str, Any], default {}
1287
+ Options passed to the card. The contents depend on the card type.
1288
+ timeout : int, default 45
1289
+ Interrupt reporting if it takes more than this many seconds.
1561
1290
  """
1562
1291
  ...
1563
1292
 
1564
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1293
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1565
1294
  """
1566
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1567
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1295
+ Specifies that this step should execute on DGX cloud.
1568
1296
 
1569
1297
 
1570
1298
  Parameters
1571
1299
  ----------
1572
- timeout : int
1573
- Time, in seconds before the task times out and fails. (Default: 3600)
1574
- poke_interval : int
1575
- Time in seconds that the job should wait in between each try. (Default: 60)
1576
- mode : str
1577
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1578
- exponential_backoff : bool
1579
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1580
- pool : str
1581
- the slot pool this task should run in,
1582
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1583
- soft_fail : bool
1584
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1585
- name : str
1586
- Name of the sensor on Airflow
1587
- description : str
1588
- Description of sensor in the Airflow UI
1589
- external_dag_id : str
1590
- The dag_id that contains the task you want to wait for.
1591
- external_task_ids : List[str]
1592
- The list of task_ids that you want to wait for.
1593
- If None (default value) the sensor waits for the DAG. (Default: None)
1594
- allowed_states : List[str]
1595
- Iterable of allowed states, (Default: ['success'])
1596
- failed_states : List[str]
1597
- Iterable of failed or dis-allowed states. (Default: None)
1598
- execution_delta : datetime.timedelta
1599
- time difference with the previous execution to look at,
1600
- the default is the same logical date as the current task or DAG. (Default: None)
1601
- check_existence: bool
1602
- Set to True to check if the external task exists or check if
1603
- the DAG to wait for exists. (Default: True)
1300
+ gpu : int
1301
+ Number of GPUs to use.
1302
+ gpu_type : str
1303
+ Type of Nvidia GPU to use.
1604
1304
  """
1605
1305
  ...
1606
1306
 
1607
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1307
+ @typing.overload
1308
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1608
1309
  """
1609
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1610
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1611
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1612
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1613
- starts only after all sensors finish.
1310
+ Specifies the resources needed when executing this step.
1311
+
1312
+ Use `@resources` to specify the resource requirements
1313
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1314
+
1315
+ You can choose the compute layer on the command line by executing e.g.
1316
+ ```
1317
+ python myflow.py run --with batch
1318
+ ```
1319
+ or
1320
+ ```
1321
+ python myflow.py run --with kubernetes
1322
+ ```
1323
+ which executes the flow on the desired system using the
1324
+ requirements specified in `@resources`.
1325
+
1326
+
1327
+ Parameters
1328
+ ----------
1329
+ cpu : int, default 1
1330
+ Number of CPUs required for this step.
1331
+ gpu : int, optional, default None
1332
+ Number of GPUs required for this step.
1333
+ disk : int, optional, default None
1334
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1335
+ memory : int, default 4096
1336
+ Memory size (in MB) required for this step.
1337
+ shared_memory : int, optional, default None
1338
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1339
+ This parameter maps to the `--shm-size` option in Docker.
1340
+ """
1341
+ ...
1342
+
1343
+ @typing.overload
1344
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1345
+ ...
1346
+
1347
+ @typing.overload
1348
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1349
+ ...
1350
+
1351
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1352
+ """
1353
+ Specifies the resources needed when executing this step.
1354
+
1355
+ Use `@resources` to specify the resource requirements
1356
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1357
+
1358
+ You can choose the compute layer on the command line by executing e.g.
1359
+ ```
1360
+ python myflow.py run --with batch
1361
+ ```
1362
+ or
1363
+ ```
1364
+ python myflow.py run --with kubernetes
1365
+ ```
1366
+ which executes the flow on the desired system using the
1367
+ requirements specified in `@resources`.
1368
+
1369
+
1370
+ Parameters
1371
+ ----------
1372
+ cpu : int, default 1
1373
+ Number of CPUs required for this step.
1374
+ gpu : int, optional, default None
1375
+ Number of GPUs required for this step.
1376
+ disk : int, optional, default None
1377
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1378
+ memory : int, default 4096
1379
+ Memory size (in MB) required for this step.
1380
+ shared_memory : int, optional, default None
1381
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1382
+ This parameter maps to the `--shm-size` option in Docker.
1383
+ """
1384
+ ...
1385
+
1386
+ def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1387
+ """
1388
+ `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1389
+ It exists to make it easier for users to know that this decorator should only be used with
1390
+ a Neo Cloud like Nebius. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
1391
+
1392
+
1393
+ Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
1394
+ for S3 read and write requests.
1395
+
1396
+ This decorator requires an integration in the Outerbounds platform that
1397
+ points to an external bucket. It affects S3 operations performed via
1398
+ Metaflow's `get_aws_client` and `S3` within a `@step`.
1399
+
1400
+ Read operations
1401
+ ---------------
1402
+ All read operations pass through the proxy. If an object does not already
1403
+ exist in the external bucket, it is cached there. For example, if code reads
1404
+ from buckets `FOO` and `BAR` using the `S3` interface, objects from both
1405
+ buckets are cached in the external bucket.
1406
+
1407
+ During task execution, all S3‑related read requests are routed through the
1408
+ proxy:
1409
+ - If the object is present in the external object store, the proxy
1410
+ streams it directly from there without accessing the requested origin
1411
+ bucket.
1412
+ - If the object is not present in the external storage, the proxy
1413
+ fetches it from the requested bucket, caches it in the external
1414
+ storage, and streams the response from the origin bucket.
1415
+
1416
+ Warning
1417
+ -------
1418
+ All READ operations (e.g., GetObject, HeadObject) pass through the external
1419
+ bucket regardless of the bucket specified in user code. Even
1420
+ `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
1421
+ external bucket cache.
1422
+
1423
+ Write operations
1424
+ ----------------
1425
+ Write behavior is controlled by the `write_mode` parameter, which determines
1426
+ whether writes also persist objects in the cache.
1427
+
1428
+ `write_mode` values:
1429
+ - `origin-and-cache`: objects are written both to the cache and to their
1430
+ intended origin bucket.
1431
+ - `origin`: objects are written only to their intended origin bucket.
1432
+
1433
+
1434
+ Parameters
1435
+ ----------
1436
+ integration_name : str, optional
1437
+ [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
1438
+ that holds the configuration for the external, S3‑compatible object
1439
+ storage bucket. If not specified, the only available S3 proxy
1440
+ integration in the namespace is used (fails if multiple exist).
1441
+ write_mode : str, optional
1442
+ Controls whether writes also go to the external bucket.
1443
+ - `origin` (default)
1444
+ - `origin-and-cache`
1445
+ debug : bool, optional
1446
+ Enables debug logging for proxy operations.
1447
+ """
1448
+ ...
1449
+
1450
+ def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1451
+ """
1452
+ `@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1453
+ It exists to make it easier for users to know that this decorator should only be used with
1454
+ a Neo Cloud like CoreWeave. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
1455
+
1456
+
1457
+ Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
1458
+ for S3 read and write requests.
1459
+
1460
+ This decorator requires an integration in the Outerbounds platform that
1461
+ points to an external bucket. It affects S3 operations performed via
1462
+ Metaflow's `get_aws_client` and `S3` within a `@step`.
1463
+
1464
+ Read operations
1465
+ ---------------
1466
+ All read operations pass through the proxy. If an object does not already
1467
+ exist in the external bucket, it is cached there. For example, if code reads
1468
+ from buckets `FOO` and `BAR` using the `S3` interface, objects from both
1469
+ buckets are cached in the external bucket.
1470
+
1471
+ During task execution, all S3‑related read requests are routed through the
1472
+ proxy:
1473
+ - If the object is present in the external object store, the proxy
1474
+ streams it directly from there without accessing the requested origin
1475
+ bucket.
1476
+ - If the object is not present in the external storage, the proxy
1477
+ fetches it from the requested bucket, caches it in the external
1478
+ storage, and streams the response from the origin bucket.
1479
+
1480
+ Warning
1481
+ -------
1482
+ All READ operations (e.g., GetObject, HeadObject) pass through the external
1483
+ bucket regardless of the bucket specified in user code. Even
1484
+ `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
1485
+ external bucket cache.
1486
+
1487
+ Write operations
1488
+ ----------------
1489
+ Write behavior is controlled by the `write_mode` parameter, which determines
1490
+ whether writes also persist objects in the cache.
1491
+
1492
+ `write_mode` values:
1493
+ - `origin-and-cache`: objects are written both to the cache and to their
1494
+ intended origin bucket.
1495
+ - `origin`: objects are written only to their intended origin bucket.
1614
1496
 
1615
1497
 
1616
1498
  Parameters
1617
1499
  ----------
1618
- timeout : int
1619
- Time, in seconds before the task times out and fails. (Default: 3600)
1620
- poke_interval : int
1621
- Time in seconds that the job should wait in between each try. (Default: 60)
1622
- mode : str
1623
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1624
- exponential_backoff : bool
1625
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1626
- pool : str
1627
- the slot pool this task should run in,
1628
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1629
- soft_fail : bool
1630
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1631
- name : str
1632
- Name of the sensor on Airflow
1633
- description : str
1634
- Description of sensor in the Airflow UI
1635
- bucket_key : Union[str, List[str]]
1636
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1637
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1638
- bucket_name : str
1639
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1640
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1641
- wildcard_match : bool
1642
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1643
- aws_conn_id : str
1644
- a reference to the s3 connection on Airflow. (Default: None)
1645
- verify : bool
1646
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1500
+ integration_name : str, optional
1501
+ [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
1502
+ that holds the configuration for the external, S3‑compatible object
1503
+ storage bucket. If not specified, the only available S3 proxy
1504
+ integration in the namespace is used (fails if multiple exist).
1505
+ write_mode : str, optional
1506
+ Controls whether writes also go to the external bucket.
1507
+ - `origin` (default)
1508
+ - `origin-and-cache`
1509
+ debug : bool, optional
1510
+ Enables debug logging for proxy operations.
1647
1511
  """
1648
1512
  ...
1649
1513
 
1650
1514
  @typing.overload
1651
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1515
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1652
1516
  """
1653
- Specifies the PyPI packages for all steps of the flow.
1517
+ Specifies the Conda environment for all steps of the flow.
1518
+
1519
+ Use `@conda_base` to set common libraries required by all
1520
+ steps and use `@conda` to specify step-specific additions.
1654
1521
 
1655
- Use `@pypi_base` to set common packages required by all
1656
- steps and use `@pypi` to specify step-specific overrides.
1657
1522
 
1658
1523
  Parameters
1659
1524
  ----------
1660
- packages : Dict[str, str], default: {}
1525
+ packages : Dict[str, str], default {}
1661
1526
  Packages to use for this flow. The key is the name of the package
1662
1527
  and the value is the version to use.
1663
- python : str, optional, default: None
1528
+ libraries : Dict[str, str], default {}
1529
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1530
+ python : str, optional, default None
1664
1531
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1665
1532
  that the version used will correspond to the version of the Python interpreter used to start the run.
1533
+ disabled : bool, default False
1534
+ If set to True, disables Conda.
1666
1535
  """
1667
1536
  ...
1668
1537
 
1669
1538
  @typing.overload
1670
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1539
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1671
1540
  ...
1672
1541
 
1673
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1542
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1674
1543
  """
1675
- Specifies the PyPI packages for all steps of the flow.
1544
+ Specifies the Conda environment for all steps of the flow.
1545
+
1546
+ Use `@conda_base` to set common libraries required by all
1547
+ steps and use `@conda` to specify step-specific additions.
1676
1548
 
1677
- Use `@pypi_base` to set common packages required by all
1678
- steps and use `@pypi` to specify step-specific overrides.
1679
1549
 
1680
1550
  Parameters
1681
1551
  ----------
1682
- packages : Dict[str, str], default: {}
1552
+ packages : Dict[str, str], default {}
1683
1553
  Packages to use for this flow. The key is the name of the package
1684
1554
  and the value is the version to use.
1685
- python : str, optional, default: None
1555
+ libraries : Dict[str, str], default {}
1556
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1557
+ python : str, optional, default None
1686
1558
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1687
1559
  that the version used will correspond to the version of the Python interpreter used to start the run.
1560
+ disabled : bool, default False
1561
+ If set to True, disables Conda.
1688
1562
  """
1689
1563
  ...
1690
1564
 
@@ -1723,147 +1597,231 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
1723
1597
  """
1724
1598
  ...
1725
1599
 
1726
- @typing.overload
1727
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1600
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1728
1601
  """
1729
- Specifies the Conda environment for all steps of the flow.
1730
-
1731
- Use `@conda_base` to set common libraries required by all
1732
- steps and use `@conda` to specify step-specific additions.
1602
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1603
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1604
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1605
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1606
+ starts only after all sensors finish.
1733
1607
 
1734
1608
 
1735
1609
  Parameters
1736
1610
  ----------
1737
- packages : Dict[str, str], default {}
1738
- Packages to use for this flow. The key is the name of the package
1739
- and the value is the version to use.
1740
- libraries : Dict[str, str], default {}
1741
- Supported for backward compatibility. When used with packages, packages will take precedence.
1742
- python : str, optional, default None
1743
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1744
- that the version used will correspond to the version of the Python interpreter used to start the run.
1745
- disabled : bool, default False
1746
- If set to True, disables Conda.
1611
+ timeout : int
1612
+ Time, in seconds before the task times out and fails. (Default: 3600)
1613
+ poke_interval : int
1614
+ Time in seconds that the job should wait in between each try. (Default: 60)
1615
+ mode : str
1616
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1617
+ exponential_backoff : bool
1618
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1619
+ pool : str
1620
+ the slot pool this task should run in,
1621
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1622
+ soft_fail : bool
1623
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1624
+ name : str
1625
+ Name of the sensor on Airflow
1626
+ description : str
1627
+ Description of sensor in the Airflow UI
1628
+ bucket_key : Union[str, List[str]]
1629
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1630
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1631
+ bucket_name : str
1632
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1633
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1634
+ wildcard_match : bool
1635
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1636
+ aws_conn_id : str
1637
+ a reference to the s3 connection on Airflow. (Default: None)
1638
+ verify : bool
1639
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1747
1640
  """
1748
1641
  ...
1749
1642
 
1750
1643
  @typing.overload
1751
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1752
- ...
1753
-
1754
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1644
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1755
1645
  """
1756
- Specifies the Conda environment for all steps of the flow.
1646
+ Specifies the flow(s) that this flow depends on.
1757
1647
 
1758
- Use `@conda_base` to set common libraries required by all
1759
- steps and use `@conda` to specify step-specific additions.
1648
+ ```
1649
+ @trigger_on_finish(flow='FooFlow')
1650
+ ```
1651
+ or
1652
+ ```
1653
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1654
+ ```
1655
+ This decorator respects the @project decorator and triggers the flow
1656
+ when upstream runs within the same namespace complete successfully
1657
+
1658
+ Additionally, you can specify project aware upstream flow dependencies
1659
+ by specifying the fully qualified project_flow_name.
1660
+ ```
1661
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1662
+ ```
1663
+ or
1664
+ ```
1665
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1666
+ ```
1667
+
1668
+ You can also specify just the project or project branch (other values will be
1669
+ inferred from the current project or project branch):
1670
+ ```
1671
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1672
+ ```
1673
+
1674
+ Note that `branch` is typically one of:
1675
+ - `prod`
1676
+ - `user.bob`
1677
+ - `test.my_experiment`
1678
+ - `prod.staging`
1760
1679
 
1761
1680
 
1762
1681
  Parameters
1763
1682
  ----------
1764
- packages : Dict[str, str], default {}
1765
- Packages to use for this flow. The key is the name of the package
1766
- and the value is the version to use.
1767
- libraries : Dict[str, str], default {}
1768
- Supported for backward compatibility. When used with packages, packages will take precedence.
1769
- python : str, optional, default None
1770
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1771
- that the version used will correspond to the version of the Python interpreter used to start the run.
1772
- disabled : bool, default False
1773
- If set to True, disables Conda.
1683
+ flow : Union[str, Dict[str, str]], optional, default None
1684
+ Upstream flow dependency for this flow.
1685
+ flows : List[Union[str, Dict[str, str]]], default []
1686
+ Upstream flow dependencies for this flow.
1687
+ options : Dict[str, Any], default {}
1688
+ Backend-specific configuration for tuning eventing behavior.
1774
1689
  """
1775
1690
  ...
1776
1691
 
1777
1692
  @typing.overload
1778
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1693
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1694
+ ...
1695
+
1696
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1779
1697
  """
1780
- Specifies the event(s) that this flow depends on.
1698
+ Specifies the flow(s) that this flow depends on.
1781
1699
 
1782
1700
  ```
1783
- @trigger(event='foo')
1701
+ @trigger_on_finish(flow='FooFlow')
1784
1702
  ```
1785
1703
  or
1786
1704
  ```
1787
- @trigger(events=['foo', 'bar'])
1705
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1788
1706
  ```
1707
+ This decorator respects the @project decorator and triggers the flow
1708
+ when upstream runs within the same namespace complete successfully
1789
1709
 
1790
- Additionally, you can specify the parameter mappings
1791
- to map event payload to Metaflow parameters for the flow.
1710
+ Additionally, you can specify project aware upstream flow dependencies
1711
+ by specifying the fully qualified project_flow_name.
1792
1712
  ```
1793
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1713
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1794
1714
  ```
1795
1715
  or
1796
1716
  ```
1797
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1798
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1717
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1799
1718
  ```
1800
1719
 
1801
- 'parameters' can also be a list of strings and tuples like so:
1802
- ```
1803
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1804
- ```
1805
- This is equivalent to:
1720
+ You can also specify just the project or project branch (other values will be
1721
+ inferred from the current project or project branch):
1806
1722
  ```
1807
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1723
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1808
1724
  ```
1809
1725
 
1726
+ Note that `branch` is typically one of:
1727
+ - `prod`
1728
+ - `user.bob`
1729
+ - `test.my_experiment`
1730
+ - `prod.staging`
1731
+
1810
1732
 
1811
1733
  Parameters
1812
1734
  ----------
1813
- event : Union[str, Dict[str, Any]], optional, default None
1814
- Event dependency for this flow.
1815
- events : List[Union[str, Dict[str, Any]]], default []
1816
- Events dependency for this flow.
1735
+ flow : Union[str, Dict[str, str]], optional, default None
1736
+ Upstream flow dependency for this flow.
1737
+ flows : List[Union[str, Dict[str, str]]], default []
1738
+ Upstream flow dependencies for this flow.
1817
1739
  options : Dict[str, Any], default {}
1818
1740
  Backend-specific configuration for tuning eventing behavior.
1819
1741
  """
1820
1742
  ...
1821
1743
 
1822
1744
  @typing.overload
1823
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1745
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1746
+ """
1747
+ Specifies the PyPI packages for all steps of the flow.
1748
+
1749
+ Use `@pypi_base` to set common packages required by all
1750
+ steps and use `@pypi` to specify step-specific overrides.
1751
+
1752
+ Parameters
1753
+ ----------
1754
+ packages : Dict[str, str], default: {}
1755
+ Packages to use for this flow. The key is the name of the package
1756
+ and the value is the version to use.
1757
+ python : str, optional, default: None
1758
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1759
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1760
+ """
1824
1761
  ...
1825
1762
 
1826
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1763
+ @typing.overload
1764
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1765
+ ...
1766
+
1767
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1827
1768
  """
1828
- Specifies the event(s) that this flow depends on.
1829
-
1830
- ```
1831
- @trigger(event='foo')
1832
- ```
1833
- or
1834
- ```
1835
- @trigger(events=['foo', 'bar'])
1836
- ```
1769
+ Specifies the PyPI packages for all steps of the flow.
1837
1770
 
1838
- Additionally, you can specify the parameter mappings
1839
- to map event payload to Metaflow parameters for the flow.
1840
- ```
1841
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1842
- ```
1843
- or
1844
- ```
1845
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1846
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1847
- ```
1771
+ Use `@pypi_base` to set common packages required by all
1772
+ steps and use `@pypi` to specify step-specific overrides.
1848
1773
 
1849
- 'parameters' can also be a list of strings and tuples like so:
1850
- ```
1851
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1852
- ```
1853
- This is equivalent to:
1854
- ```
1855
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1856
- ```
1774
+ Parameters
1775
+ ----------
1776
+ packages : Dict[str, str], default: {}
1777
+ Packages to use for this flow. The key is the name of the package
1778
+ and the value is the version to use.
1779
+ python : str, optional, default: None
1780
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1781
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1782
+ """
1783
+ ...
1784
+
1785
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1786
+ """
1787
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1788
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1857
1789
 
1858
1790
 
1859
1791
  Parameters
1860
1792
  ----------
1861
- event : Union[str, Dict[str, Any]], optional, default None
1862
- Event dependency for this flow.
1863
- events : List[Union[str, Dict[str, Any]]], default []
1864
- Events dependency for this flow.
1865
- options : Dict[str, Any], default {}
1866
- Backend-specific configuration for tuning eventing behavior.
1793
+ timeout : int
1794
+ Time, in seconds before the task times out and fails. (Default: 3600)
1795
+ poke_interval : int
1796
+ Time in seconds that the job should wait in between each try. (Default: 60)
1797
+ mode : str
1798
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1799
+ exponential_backoff : bool
1800
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1801
+ pool : str
1802
+ the slot pool this task should run in,
1803
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1804
+ soft_fail : bool
1805
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1806
+ name : str
1807
+ Name of the sensor on Airflow
1808
+ description : str
1809
+ Description of sensor in the Airflow UI
1810
+ external_dag_id : str
1811
+ The dag_id that contains the task you want to wait for.
1812
+ external_task_ids : List[str]
1813
+ The list of task_ids that you want to wait for.
1814
+ If None (default value) the sensor waits for the DAG. (Default: None)
1815
+ allowed_states : List[str]
1816
+ Iterable of allowed states, (Default: ['success'])
1817
+ failed_states : List[str]
1818
+ Iterable of failed or dis-allowed states. (Default: None)
1819
+ execution_delta : datetime.timedelta
1820
+ time difference with the previous execution to look at,
1821
+ the default is the same logical date as the current task or DAG. (Default: None)
1822
+ check_existence: bool
1823
+ Set to True to check if the external task exists or check if
1824
+ the DAG to wait for exists. (Default: True)
1867
1825
  """
1868
1826
  ...
1869
1827
 
@@ -1982,105 +1940,148 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1982
1940
  ...
1983
1941
 
1984
1942
  @typing.overload
1985
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1943
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1986
1944
  """
1987
- Specifies the flow(s) that this flow depends on.
1945
+ Specifies the event(s) that this flow depends on.
1988
1946
 
1989
1947
  ```
1990
- @trigger_on_finish(flow='FooFlow')
1948
+ @trigger(event='foo')
1991
1949
  ```
1992
1950
  or
1993
1951
  ```
1994
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1952
+ @trigger(events=['foo', 'bar'])
1995
1953
  ```
1996
- This decorator respects the @project decorator and triggers the flow
1997
- when upstream runs within the same namespace complete successfully
1998
1954
 
1999
- Additionally, you can specify project aware upstream flow dependencies
2000
- by specifying the fully qualified project_flow_name.
1955
+ Additionally, you can specify the parameter mappings
1956
+ to map event payload to Metaflow parameters for the flow.
2001
1957
  ```
2002
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1958
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
2003
1959
  ```
2004
1960
  or
2005
1961
  ```
2006
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1962
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1963
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
2007
1964
  ```
2008
1965
 
2009
- You can also specify just the project or project branch (other values will be
2010
- inferred from the current project or project branch):
1966
+ 'parameters' can also be a list of strings and tuples like so:
2011
1967
  ```
2012
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1968
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1969
+ ```
1970
+ This is equivalent to:
1971
+ ```
1972
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
2013
1973
  ```
2014
-
2015
- Note that `branch` is typically one of:
2016
- - `prod`
2017
- - `user.bob`
2018
- - `test.my_experiment`
2019
- - `prod.staging`
2020
1974
 
2021
1975
 
2022
1976
  Parameters
2023
1977
  ----------
2024
- flow : Union[str, Dict[str, str]], optional, default None
2025
- Upstream flow dependency for this flow.
2026
- flows : List[Union[str, Dict[str, str]]], default []
2027
- Upstream flow dependencies for this flow.
1978
+ event : Union[str, Dict[str, Any]], optional, default None
1979
+ Event dependency for this flow.
1980
+ events : List[Union[str, Dict[str, Any]]], default []
1981
+ Events dependency for this flow.
2028
1982
  options : Dict[str, Any], default {}
2029
1983
  Backend-specific configuration for tuning eventing behavior.
2030
1984
  """
2031
1985
  ...
2032
1986
 
2033
1987
  @typing.overload
2034
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1988
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
2035
1989
  ...
2036
1990
 
2037
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1991
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
2038
1992
  """
2039
- Specifies the flow(s) that this flow depends on.
1993
+ Specifies the event(s) that this flow depends on.
2040
1994
 
2041
1995
  ```
2042
- @trigger_on_finish(flow='FooFlow')
1996
+ @trigger(event='foo')
2043
1997
  ```
2044
1998
  or
2045
1999
  ```
2046
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
2000
+ @trigger(events=['foo', 'bar'])
2047
2001
  ```
2048
- This decorator respects the @project decorator and triggers the flow
2049
- when upstream runs within the same namespace complete successfully
2050
2002
 
2051
- Additionally, you can specify project aware upstream flow dependencies
2052
- by specifying the fully qualified project_flow_name.
2003
+ Additionally, you can specify the parameter mappings
2004
+ to map event payload to Metaflow parameters for the flow.
2053
2005
  ```
2054
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
2006
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
2055
2007
  ```
2056
2008
  or
2057
2009
  ```
2058
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
2010
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
2011
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
2059
2012
  ```
2060
2013
 
2061
- You can also specify just the project or project branch (other values will be
2062
- inferred from the current project or project branch):
2014
+ 'parameters' can also be a list of strings and tuples like so:
2063
2015
  ```
2064
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
2016
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
2017
+ ```
2018
+ This is equivalent to:
2019
+ ```
2020
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
2065
2021
  ```
2066
-
2067
- Note that `branch` is typically one of:
2068
- - `prod`
2069
- - `user.bob`
2070
- - `test.my_experiment`
2071
- - `prod.staging`
2072
2022
 
2073
2023
 
2074
2024
  Parameters
2075
2025
  ----------
2076
- flow : Union[str, Dict[str, str]], optional, default None
2077
- Upstream flow dependency for this flow.
2078
- flows : List[Union[str, Dict[str, str]]], default []
2079
- Upstream flow dependencies for this flow.
2026
+ event : Union[str, Dict[str, Any]], optional, default None
2027
+ Event dependency for this flow.
2028
+ events : List[Union[str, Dict[str, Any]]], default []
2029
+ Events dependency for this flow.
2080
2030
  options : Dict[str, Any], default {}
2081
2031
  Backend-specific configuration for tuning eventing behavior.
2082
2032
  """
2083
2033
  ...
2084
2034
 
2035
+ @typing.overload
2036
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
2037
+ """
2038
+ Specifies the times when the flow should be run when running on a
2039
+ production scheduler.
2040
+
2041
+
2042
+ Parameters
2043
+ ----------
2044
+ hourly : bool, default False
2045
+ Run the workflow hourly.
2046
+ daily : bool, default True
2047
+ Run the workflow daily.
2048
+ weekly : bool, default False
2049
+ Run the workflow weekly.
2050
+ cron : str, optional, default None
2051
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
2052
+ specified by this expression.
2053
+ timezone : str, optional, default None
2054
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
2055
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
2056
+ """
2057
+ ...
2058
+
2059
+ @typing.overload
2060
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
2061
+ ...
2062
+
2063
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
2064
+ """
2065
+ Specifies the times when the flow should be run when running on a
2066
+ production scheduler.
2067
+
2068
+
2069
+ Parameters
2070
+ ----------
2071
+ hourly : bool, default False
2072
+ Run the workflow hourly.
2073
+ daily : bool, default True
2074
+ Run the workflow daily.
2075
+ weekly : bool, default False
2076
+ Run the workflow weekly.
2077
+ cron : str, optional, default None
2078
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
2079
+ specified by this expression.
2080
+ timezone : str, optional, default None
2081
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
2082
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
2083
+ """
2084
+ ...
2085
+
2085
2086
  pkg_name: str
2086
2087