ob-metaflow-stubs 6.0.8.2__py2.py3-none-any.whl → 6.0.8.3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (262)
  1. metaflow-stubs/__init__.pyi +978 -978
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +3 -3
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +1 -1
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +2 -2
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/meta_files.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +49 -49
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +1 -1
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +1 -1
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +2 -2
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +2 -2
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +2 -2
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +4 -4
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +2 -2
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +1 -1
  116. metaflow-stubs/multicore_utils.pyi +1 -1
  117. metaflow-stubs/ob_internal.pyi +1 -1
  118. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  119. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +4 -4
  122. metaflow-stubs/packaging_sys/utils.pyi +1 -1
  123. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  124. metaflow-stubs/parameters.pyi +2 -2
  125. metaflow-stubs/plugins/__init__.pyi +11 -11
  126. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  128. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  133. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  135. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +1 -1
  141. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  142. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  157. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  164. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  165. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  166. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  178. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  179. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  180. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  181. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  186. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  187. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  188. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  189. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +1 -1
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  194. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  207. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  208. metaflow-stubs/plugins/optuna/__init__.pyi +1 -1
  209. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  210. metaflow-stubs/plugins/perimeters.pyi +1 -1
  211. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  212. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  213. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  214. metaflow-stubs/plugins/pypi/conda_environment.pyi +2 -2
  215. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  216. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  217. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  218. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  219. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  220. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  221. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  222. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  223. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  224. metaflow-stubs/plugins/secrets/secrets_func.pyi +1 -1
  225. metaflow-stubs/plugins/secrets/secrets_spec.pyi +1 -1
  226. metaflow-stubs/plugins/secrets/utils.pyi +1 -1
  227. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  228. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  229. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  230. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  231. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  232. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  233. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  234. metaflow-stubs/profilers/__init__.pyi +1 -1
  235. metaflow-stubs/pylint_wrapper.pyi +1 -1
  236. metaflow-stubs/runner/__init__.pyi +1 -1
  237. metaflow-stubs/runner/deployer.pyi +4 -4
  238. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  239. metaflow-stubs/runner/metaflow_runner.pyi +1 -1
  240. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  241. metaflow-stubs/runner/nbrun.pyi +1 -1
  242. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  243. metaflow-stubs/runner/utils.pyi +3 -3
  244. metaflow-stubs/system/__init__.pyi +1 -1
  245. metaflow-stubs/system/system_logger.pyi +2 -2
  246. metaflow-stubs/system/system_monitor.pyi +1 -1
  247. metaflow-stubs/tagging_util.pyi +1 -1
  248. metaflow-stubs/tuple_util.pyi +1 -1
  249. metaflow-stubs/user_configs/__init__.pyi +1 -1
  250. metaflow-stubs/user_configs/config_options.pyi +1 -1
  251. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  252. metaflow-stubs/user_decorators/__init__.pyi +1 -1
  253. metaflow-stubs/user_decorators/common.pyi +1 -1
  254. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  255. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  256. metaflow-stubs/user_decorators/user_flow_decorator.pyi +2 -2
  257. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  258. {ob_metaflow_stubs-6.0.8.2.dist-info → ob_metaflow_stubs-6.0.8.3.dist-info}/METADATA +1 -1
  259. ob_metaflow_stubs-6.0.8.3.dist-info/RECORD +262 -0
  260. ob_metaflow_stubs-6.0.8.2.dist-info/RECORD +0 -262
  261. {ob_metaflow_stubs-6.0.8.2.dist-info → ob_metaflow_stubs-6.0.8.3.dist-info}/WHEEL +0 -0
  262. {ob_metaflow_stubs-6.0.8.2.dist-info → ob_metaflow_stubs-6.0.8.3.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.17.1.0+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-08-22T06:36:31.648602 #
+ # Generated on 2025-08-25T21:23:22.454608 #
  ######################################################################################################

  from __future__ import annotations
@@ -40,16 +40,16 @@ from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
  from . import cards as cards
- from . import tuple_util as tuple_util
- from . import metaflow_git as metaflow_git
  from . import events as events
+ from . import metaflow_git as metaflow_git
+ from . import tuple_util as tuple_util
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import client as client
  from .client.core import namespace as namespace
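The reordering above only swaps import lines; `requirements_txt_parser`, `pyproject_toml_parser`, and `conda_environment_yml_parser` remain re-exported at the top level of the package. A rough sketch of how those re-exports are typically consumed follows (the `Config(..., parser=...)` pattern and the shape of the parsed value are assumptions based on Metaflow's config conventions, not something this diff shows):

```python
# Hedged sketch: assumes Config accepts a parser callable that converts the
# raw text of "requirements.txt" (an illustrative path) into config data.
from metaflow import FlowSpec, step, Config, requirements_txt_parser

class DepsFlow(FlowSpec):
    deps = Config("deps", default="requirements.txt", parser=requirements_txt_parser)

    @step
    def start(self):
        # Parsed dependency spec; typically fed to @pypi_base / @conda_base.
        print(self.deps)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DepsFlow()
```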
@@ -167,269 +167,402 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- This decorator is used to run vllm APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @vllm(
- model="...",
- ...
- )
+ Specifies a timeout for your step.

- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
+ This decorator is useful if this step may hang indefinitely.

- Valid model options
- -------------------
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
- If you need multiple models, you must create multiple @vllm decorators.
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- model: str
- HuggingFace model identifier to be served by vLLM.
- backend: str
- Determines where and how to run the vLLM process.
- openai_api_server: bool
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
- Default is False (uses native engine).
- Set to True for backward compatibility with existing code.
- debug: bool
- Whether to turn on verbose debugging logs.
- card_refresh_interval: int
- Interval in seconds for refreshing the vLLM status card.
- Only used when openai_api_server=True.
- max_retries: int
- Maximum number of retries checking for vLLM server startup.
- Only used when openai_api_server=True.
- retry_alert_frequency: int
- Frequency of alert logs for vLLM server startup retries.
- Only used when openai_api_server=True.
- engine_args : dict
- Additional keyword arguments to pass to the vLLM engine.
- For example, `tensor_parallel_size=2`.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
-
- > Examples
-
- **Usage: creating references of models from huggingface that may be loaded in downstream steps**
- ```python
- @huggingface_hub
- @step
- def pull_model_from_huggingface(self):
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
- # value of the function is a reference to the model in the backend storage.
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
-
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
- self.llama_model = current.huggingface_hub.snapshot_download(
- repo_id=self.model_id,
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
- )
- self.next(self.train)
- ```
+ Specifies a timeout for your step.

- **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
- ```python
- @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
- @step
- def pull_model_from_huggingface(self):
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
- ```
+ This decorator is useful if this step may hang indefinitely.

- ```python
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
- @step
- def finetune_model(self):
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
- # path_to_model will be /my-directory
- ```
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

- ```python
- # Takes all the arguments passed to `snapshot_download`
- # except for `local_dir`
- @huggingface_hub(load=[
- {
- "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
- },
- {
- "repo_id": "myorg/mistral-lora",
- "repo_type": "model",
- },
- ])
- @step
- def finetune_model(self):
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
- # path_to_model will be /my-directory
- ```
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- temp_dir_root : str, optional
- The root directory that will hold the temporary directory where objects will be downloaded.
-
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
- The list of repos (models/datasets) to load.
-
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
-
- - If repo (model/dataset) is not found in the datastore:
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
-
- - If repo is found in the datastore:
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
- """
- ...
-
- @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Internal decorator to support Fast bakery
- """
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
  """
- Internal decorator to support Fast bakery
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies environment variables to be set prior to the execution of a step.
-
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on DGX cloud.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
+
+ > Examples
+
+ **Usage: creating references of models from huggingface that may be loaded in downstream steps**
+ ```python
+ @huggingface_hub
+ @step
+ def pull_model_from_huggingface(self):
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
+ # value of the function is a reference to the model in the backend storage.
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
+
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
+ self.llama_model = current.huggingface_hub.snapshot_download(
+ repo_id=self.model_id,
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
+ )
+ self.next(self.train)
+ ```
+
+ **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
+ ```python
+ @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
+ @step
+ def pull_model_from_huggingface(self):
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
+ ```
+
+ ```python
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
+ @step
+ def finetune_model(self):
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
+ # path_to_model will be /my-directory
+ ```
+
+ ```python
+ # Takes all the arguments passed to `snapshot_download`
+ # except for `local_dir`
+ @huggingface_hub(load=[
+ {
+ "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
+ },
+ {
+ "repo_id": "myorg/mistral-lora",
+ "repo_type": "model",
+ },
+ ])
+ @step
+ def finetune_model(self):
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
+ # path_to_model will be /my-directory
+ ```
+
+
+ Parameters
+ ----------
+ temp_dir_root : str, optional
+ The root directory that will hold the temporary directory where objects will be downloaded.
+
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
+ The list of repos (models/datasets) to load.
+
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
+
+ - If repo (model/dataset) is not found in the datastore:
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
+
+ - If repo is found in the datastore:
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
+ """
+ ...
+
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ S3 Proxy decorator for routing S3 requests through a local proxy service.
+
+
+ Parameters
+ ----------
+ integration_name : str, optional
+ Name of the S3 proxy integration. If not specified, will use the only
+ available S3 proxy integration in the namespace (fails if multiple exist).
+ write_mode : str, optional
+ The desired behavior during write operations to target (origin) S3 bucket.
+ allowed options are:
+ "origin-and-cache" -> write to both the target S3 bucket and local object
+ storage
+ "origin" -> only write to the target S3 bucket
+ "cache" -> only write to the object storage service used for caching
+ debug : bool, optional
+ Enable debug logging for proxy operations.
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

@@ -562,710 +695,423 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies that this step should execute on DGX cloud.
-
-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+ It exists to make it easier for users to know that this decorator should only be used with
+ a Neo Cloud like Nebius.
  """
  ...

- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- S3 Proxy decorator for routing S3 requests through a local proxy service.
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+ It exists to make it easier for users to know that this decorator should only be used with
+ a Neo Cloud like Nebius.
+ """
+ ...
+
+ @typing.overload
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Enables checkpointing for a step.

+ > Examples

- Parameters
- ----------
- integration_name : str, optional
- Name of the S3 proxy integration. If not specified, will use the only
- available S3 proxy integration in the namespace (fails if multiple exist).
- write_mode : str, optional
- The desired behavior during write operations to target (origin) S3 bucket.
- allowed options are:
- "origin-and-cache" -> write to both the target S3 bucket and local object
- storage
- "origin" -> only write to the target S3 bucket
- "cache" -> only write to the object storage service used for caching
- debug : bool, optional
- Enable debug logging for proxy operations.
- """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- image_pull_secrets: List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
- Kubernetes image pull secrets to use when pulling container images
- in Kubernetes.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[Dict[str,str]], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
666
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
667
- shared_memory: int, optional
668
- Shared memory size (in MiB) required for this step
669
- port: int, optional
670
- Port number to specify in the Kubernetes job object
671
- compute_pool : str, optional, default None
672
- Compute pool to be used for this step.
673
- If not specified, any accessible compute pool within the perimeter is used.
674
- hostname_resolution_timeout: int, default 10 * 60
675
- Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
676
- Only applicable when @parallel is used.
677
- qos: str, default: Burstable
678
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
679
-
680
- security_context: Dict[str, Any], optional, default None
681
- Container security context. Applies to the task container. Allows the following keys:
682
- - privileged: bool, optional, default None
683
- - allow_privilege_escalation: bool, optional, default None
684
- - run_as_user: int, optional, default None
685
- - run_as_group: int, optional, default None
686
- - run_as_non_root: bool, optional, default None
687
- """
688
- ...
689
-
690
- @typing.overload
691
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
692
- """
693
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
694
- It exists to make it easier for users to know that this decorator should only be used with
695
- a Neo Cloud like Nebius.
696
- """
697
- ...
698
-
699
- @typing.overload
700
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
701
- ...
702
-
703
- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
704
- """
705
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
706
- It exists to make it easier for users to know that this decorator should only be used with
707
- a Neo Cloud like Nebius.
708
- """
709
- ...
710
-
711
- @typing.overload
712
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
713
- """
714
- Specifies a timeout for your step.
715
-
716
- This decorator is useful if this step may hang indefinitely.
717
-
718
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
719
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
720
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
721
-
722
- Note that all the values specified in parameters are added together so if you specify
723
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
724
-
725
-
726
- Parameters
727
- ----------
728
- seconds : int, default 0
729
- Number of seconds to wait prior to timing out.
730
- minutes : int, default 0
731
- Number of minutes to wait prior to timing out.
732
- hours : int, default 0
733
- Number of hours to wait prior to timing out.
734
- """
735
- ...
736
-
737
- @typing.overload
738
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
739
- ...
740
-
741
- @typing.overload
742
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
743
- ...
744
-
745
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
746
- """
747
- Specifies a timeout for your step.
748
-
749
- This decorator is useful if this step may hang indefinitely.
750
-
751
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
752
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
753
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
754
-
755
- Note that all the values specified in parameters are added together so if you specify
756
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
757
-
758
-
759
- Parameters
760
- ----------
761
- seconds : int, default 0
762
- Number of seconds to wait prior to timing out.
763
- minutes : int, default 0
764
- Number of minutes to wait prior to timing out.
765
- hours : int, default 0
766
- Number of hours to wait prior to timing out.
767
- """
768
- ...
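As a hedged illustration of the additive behaviour described above (60 seconds plus 1 hour gives an effective limit of 1 hour and 1 minute), a step might be guarded like this; the work inside the step is a stand-in.

```python
import time

from metaflow import FlowSpec, step, timeout, retry, catch


class TimeoutFlow(FlowSpec):

    # seconds + minutes + hours are summed: the effective limit is 1 hour 1 minute.
    @catch(var="timed_out")          # capture the timeout exception, if any
    @retry(times=1)                  # one retry before @catch takes over
    @timeout(seconds=60, hours=1)
    @step
    def start(self):
        time.sleep(5)                # placeholder for possibly-hanging work
        self.next(self.end)

    @step
    def end(self):
        # `timed_out` is only meaningful if the step actually exceeded its limit.
        print("timed out:", getattr(self, "timed_out", None))


if __name__ == "__main__":
    TimeoutFlow()
```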
769
-
770
- @typing.overload
771
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
772
- """
773
- Specifies that the step will succeed under all circumstances.
774
-
775
- The decorator will create an optional artifact, specified by `var`, which
776
- contains the exception raised. You can use it to detect the presence
777
- of errors, indicating that all happy-path artifacts produced by the step
778
- are missing.
779
-
780
-
781
- Parameters
782
- ----------
783
- var : str, optional, default None
784
- Name of the artifact in which to store the caught exception.
785
- If not specified, the exception is not stored.
786
- print_exception : bool, default True
787
- Determines whether or not the exception is printed to
788
- stdout when caught.
789
- """
790
- ...
791
-
792
- @typing.overload
793
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
794
- ...
795
-
796
- @typing.overload
797
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
798
- ...
799
-
800
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
801
- """
802
- Specifies that the step will succeed under all circumstances.
803
-
804
- The decorator will create an optional artifact, specified by `var`, which
805
- contains the exception raised. You can use it to detect the presence
806
- of errors, indicating that all happy-path artifacts produced by the step
807
- are missing.
808
-
726
+ - Saving Checkpoints
809
727
 
810
- Parameters
811
- ----------
812
- var : str, optional, default None
813
- Name of the artifact in which to store the caught exception.
814
- If not specified, the exception is not stored.
815
- print_exception : bool, default True
816
- Determines whether or not the exception is printed to
817
- stdout when caught.
818
- """
819
- ...
820
-
821
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
822
- """
823
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
728
+ ```python
729
+ @checkpoint
730
+ @step
731
+ def train(self):
732
+ model = create_model(self.parameters, checkpoint_path = None)
733
+ for i in range(self.epochs):
734
+ # some training logic
735
+ loss = model.train(self.dataset)
736
+ if i % 10 == 0:
737
+ model.save(
738
+ current.checkpoint.directory,
739
+ )
740
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
741
+ # and returns a reference dictionary to the checkpoint saved in the datastore
742
+ self.latest_checkpoint = current.checkpoint.save(
743
+ name="epoch_checkpoint",
744
+ metadata={
745
+ "epoch": i,
746
+ "loss": loss,
747
+ }
748
+ )
749
+ ```
824
750
 
825
- User code call
826
- --------------
827
- @ollama(
828
- models=[...],
829
- ...
830
- )
751
+ - Using Loaded Checkpoints
831
752
 
832
- Valid backend options
833
- ---------------------
834
- - 'local': Run as a separate process on the local task machine.
835
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
836
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
753
+ ```python
754
+ @retry(times=3)
755
+ @checkpoint
756
+ @step
757
+ def train(self):
758
+ # Assume that the task has restarted and the previous attempt of the task
759
+ # saved a checkpoint
760
+ checkpoint_path = None
761
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
762
+ print("Loaded checkpoint from the previous attempt")
763
+ checkpoint_path = current.checkpoint.directory
837
764
 
838
- Valid model options
839
- -------------------
840
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
765
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
766
+ for i in range(self.epochs):
767
+ ...
768
+ ```
841
769
 
842
770
 
843
771
  Parameters
844
772
  ----------
845
- models: list[str]
846
- List of Ollama containers running models in sidecars.
847
- backend: str
848
- Determines where and how to run the Ollama process.
849
- force_pull: bool
850
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
851
- cache_update_policy: str
852
- Cache update policy: "auto", "force", or "never".
853
- force_cache_update: bool
854
- Simple override for "force" cache update policy.
855
- debug: bool
856
- Whether to turn on verbose debugging logs.
857
- circuit_breaker_config: dict
858
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
859
- timeout_config: dict
860
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
861
- """
862
- ...
863
-
864
- @typing.overload
865
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
866
- """
867
- Creates a human-readable report, a Metaflow Card, after this step completes.
868
-
869
- Note that you may add multiple `@card` decorators in a step with different parameters.
870
-
773
+ load_policy : str, default: "fresh"
774
+ The policy for loading the checkpoint. The following policies are supported:
775
+ - "eager": Loads the the latest available checkpoint within the namespace.
776
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
777
+ will be loaded at the start of the task.
778
+ - "none": Do not load any checkpoint
779
+ - "fresh": Loads the lastest checkpoint created within the running Task.
780
+ This mode helps loading checkpoints across various retry attempts of the same task.
781
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
782
+ created within the task will be loaded when the task is retries execution on failure.
871
783
 
872
- Parameters
873
- ----------
874
- type : str, default 'default'
875
- Card type.
876
- id : str, optional, default None
877
- If multiple cards are present, use this id to identify this card.
878
- options : Dict[str, Any], default {}
879
- Options passed to the card. The contents depend on the card type.
880
- timeout : int, default 45
881
- Interrupt reporting if it takes more than this many seconds.
784
+ temp_dir_root : str, default: None
785
+ The root directory under which `current.checkpoint.directory` will be created.
882
786
  """
883
787
  ...
884
788
 
885
789
  @typing.overload
886
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
790
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
887
791
  ...
888
792
 
889
793
  @typing.overload
890
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
794
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
891
795
  ...
892
796
 
893
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
797
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
894
798
  """
895
- Creates a human-readable report, a Metaflow Card, after this step completes.
799
+ Enables checkpointing for a step.
896
800
 
897
- Note that you may add multiple `@card` decorators in a step with different parameters.
801
+ > Examples
898
802
 
803
+ - Saving Checkpoints
899
804
 
900
- Parameters
901
- ----------
902
- type : str, default 'default'
903
- Card type.
904
- id : str, optional, default None
905
- If multiple cards are present, use this id to identify this card.
906
- options : Dict[str, Any], default {}
907
- Options passed to the card. The contents depend on the card type.
908
- timeout : int, default 45
909
- Interrupt reporting if it takes more than this many seconds.
910
- """
911
- ...
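A small, hedged sketch of attaching a card and appending content to it from user code, assuming the standard `metaflow.cards` components; the markdown text and table contents are illustrative.

```python
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown, Table


class CardFlow(FlowSpec):

    @card(type="blank", id="report", timeout=60)
    @step
    def start(self):
        self.rows = [[i, i * i] for i in range(5)]
        # Append components to the card identified by id="report".
        current.card["report"].append(Markdown("# Squares report"))
        current.card["report"].append(Table(self.rows, headers=["x", "x^2"]))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardFlow()
```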
912
-
913
- @typing.overload
914
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
915
- """
916
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
917
- It exists to make it easier for users to know that this decorator should only be used with
918
- a Neo Cloud like CoreWeave.
919
- """
920
- ...
921
-
922
- @typing.overload
923
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
924
- ...
925
-
926
- def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
927
- """
928
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
929
- It exists to make it easier for users to know that this decorator should only be used with
930
- a Neo Cloud like CoreWeave.
931
- """
932
- ...
933
-
934
- @typing.overload
935
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
936
- """
937
- Decorator prototype for all step decorators. This function gets specialized
938
- and imported for all decorator types by _import_plugin_decorators().
939
- """
940
- ...
941
-
942
- @typing.overload
943
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
944
- ...
945
-
946
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
947
- """
948
- Decorator prototype for all step decorators. This function gets specialized
949
- and imported for all decorator types by _import_plugin_decorators().
950
- """
951
- ...
952
-
953
- @typing.overload
954
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
955
- """
956
- Specifies the number of times the task corresponding
957
- to a step needs to be retried.
805
+ ```python
806
+ @checkpoint
807
+ @step
808
+ def train(self):
809
+ model = create_model(self.parameters, checkpoint_path = None)
810
+ for i in range(self.epochs):
811
+ # some training logic
812
+ loss = model.train(self.dataset)
813
+ if i % 10 == 0:
814
+ model.save(
815
+ current.checkpoint.directory,
816
+ )
817
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
818
+ # and returns a reference dictionary to the checkpoint saved in the datastore
819
+ self.latest_checkpoint = current.checkpoint.save(
820
+ name="epoch_checkpoint",
821
+ metadata={
822
+ "epoch": i,
823
+ "loss": loss,
824
+ }
825
+ )
826
+ ```
958
827
 
959
- This decorator is useful for handling transient errors, such as networking issues.
960
- If your task contains operations that can't be retried safely, e.g. database updates,
961
- it is advisable to annotate it with `@retry(times=0)`.
828
+ - Using Loaded Checkpoints
962
829
 
963
- This can be used in conjunction with the `@catch` decorator. The `@catch`
964
- decorator will execute a no-op task after all retries have been exhausted,
965
- ensuring that the flow execution can continue.
830
+ ```python
831
+ @retry(times=3)
832
+ @checkpoint
833
+ @step
834
+ def train(self):
835
+ # Assume that the task has restarted and the previous attempt of the task
836
+ # saved a checkpoint
837
+ checkpoint_path = None
838
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
839
+ print("Loaded checkpoint from the previous attempt")
840
+ checkpoint_path = current.checkpoint.directory
841
+
842
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
843
+ for i in range(self.epochs):
844
+ ...
845
+ ```
966
846
 
967
847
 
968
848
  Parameters
969
849
  ----------
970
- times : int, default 3
971
- Number of times to retry this task.
972
- minutes_between_retries : int, default 2
973
- Number of minutes between retries.
850
+ load_policy : str, default: "fresh"
851
+ The policy for loading the checkpoint. The following policies are supported:
852
+ - "eager": Loads the the latest available checkpoint within the namespace.
853
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
854
+ will be loaded at the start of the task.
855
+ - "none": Do not load any checkpoint
856
+ - "fresh": Loads the lastest checkpoint created within the running Task.
857
+ This mode helps loading checkpoints across various retry attempts of the same task.
858
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
859
+ created within the task will be loaded when the task is retries execution on failure.
860
+
861
+ temp_dir_root : str, default: None
862
+ The root directory under which `current.checkpoint.directory` will be created.
974
863
  """
975
864
  ...
976
865
 
977
866
  @typing.overload
978
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
867
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
868
+ """
869
+ A simple decorator that demonstrates using CardDecoratorInjector
870
+ to inject a card and render simple markdown content.
871
+ """
979
872
  ...
980
873
 
981
874
  @typing.overload
982
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
875
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
983
876
  ...
984
877
 
985
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
878
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
986
879
  """
987
- Specifies the number of times the task corresponding
988
- to a step needs to be retried.
989
-
990
- This decorator is useful for handling transient errors, such as networking issues.
991
- If your task contains operations that can't be retried safely, e.g. database updates,
992
- it is advisable to annotate it with `@retry(times=0)`.
993
-
994
- This can be used in conjunction with the `@catch` decorator. The `@catch`
995
- decorator will execute a no-op task after all retries have been exhausted,
996
- ensuring that the flow execution can continue.
997
-
998
-
999
- Parameters
1000
- ----------
1001
- times : int, default 3
1002
- Number of times to retry this task.
1003
- minutes_between_retries : int, default 2
1004
- Number of minutes between retries.
880
+ A simple decorator that demonstrates using CardDecoratorInjector
881
+ to inject a card and render simple markdown content.
1005
882
  """
1006
883
  ...
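To make the retry guidance above concrete, a hedged sketch: transient steps get a few retries, while a step with a non-idempotent side effect (the database write here is a placeholder) is pinned to `@retry(times=0)` so it is never re-executed automatically.

```python
from metaflow import FlowSpec, step, retry


def fetch_remote_data():
    # Placeholder for a flaky network call that is safe to repeat.
    return {"rows": 3}


def write_to_database(payload):
    # Placeholder for a non-idempotent write (e.g. an INSERT without upsert).
    print("wrote", payload)


class RetryFlow(FlowSpec):

    @retry(times=4, minutes_between_retries=1)  # tolerate transient failures
    @step
    def start(self):
        self.payload = fetch_remote_data()
        self.next(self.persist)

    @retry(times=0)  # per the guidance above: do not retry unsafe operations
    @step
    def persist(self):
        write_to_database(self.payload)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    RetryFlow()
```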
1007
884
 
1008
885
  @typing.overload
1009
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
886
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1010
887
  """
1011
- Specifies the Conda environment for the step.
888
+ Specifies the PyPI packages for the step.
1012
889
 
1013
890
  Information in this decorator will augment any
1014
- attributes set in the `@conda_base` flow-level decorator. Hence,
1015
- you can use `@conda_base` to set packages required by all
1016
- steps and use `@conda` to specify step-specific overrides.
891
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
892
+ you can use `@pypi_base` to set packages required by all
893
+ steps and use `@pypi` to specify step-specific overrides.
1017
894
 
1018
895
 
1019
896
  Parameters
1020
897
  ----------
1021
- packages : Dict[str, str], default {}
898
+ packages : Dict[str, str], default: {}
1022
899
  Packages to use for this step. The key is the name of the package
1023
900
  and the value is the version to use.
1024
- libraries : Dict[str, str], default {}
1025
- Supported for backward compatibility. When used with packages, packages will take precedence.
1026
- python : str, optional, default None
901
+ python : str, optional, default: None
1027
902
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1028
903
  that the version used will correspond to the version of the Python interpreter used to start the run.
1029
- disabled : bool, default False
1030
- If set to True, disables @conda.
1031
904
  """
1032
905
  ...
1033
906
 
1034
907
  @typing.overload
1035
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
908
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1036
909
  ...
1037
910
 
1038
911
  @typing.overload
1039
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
912
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1040
913
  ...
1041
914
 
1042
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
915
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1043
916
  """
1044
- Specifies the Conda environment for the step.
917
+ Specifies the PyPI packages for the step.
1045
918
 
1046
919
  Information in this decorator will augment any
1047
- attributes set in the `@conda_base` flow-level decorator. Hence,
1048
- you can use `@conda_base` to set packages required by all
1049
- steps and use `@conda` to specify step-specific overrides.
920
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
921
+ you can use `@pypi_base` to set packages required by all
922
+ steps and use `@pypi` to specify step-specific overrides.
1050
923
 
1051
924
 
1052
925
  Parameters
1053
926
  ----------
1054
- packages : Dict[str, str], default {}
927
+ packages : Dict[str, str], default: {}
1055
928
  Packages to use for this step. The key is the name of the package
1056
929
  and the value is the version to use.
1057
- libraries : Dict[str, str], default {}
1058
- Supported for backward compatibility. When used with packages, packages will take precedence.
1059
- python : str, optional, default None
930
+ python : str, optional, default: None
1060
931
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1061
932
  that the version used will correspond to the version of the Python interpreter used to start the run.
1062
- disabled : bool, default False
1063
- If set to True, disables @conda.
1064
933
  """
1065
934
  ...
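A hedged sketch of combining the flow-level and step-level dependency decorators described above (`@conda` in the old stub, `@pypi` in the new one); the package names and versions are illustrative only.

```python
from metaflow import FlowSpec, step, pypi, pypi_base


@pypi_base(packages={"requests": "2.32.3"}, python="3.11")  # shared by all steps
class DepsFlow(FlowSpec):

    @pypi(packages={"pandas": "2.2.2"})  # step-specific addition/override
    @step
    def start(self):
        import pandas as pd  # available only inside this step's environment
        self.n = len(pd.DataFrame({"a": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        import requests  # provided by @pypi_base
        print(requests.__version__, self.n)


if __name__ == "__main__":
    DepsFlow()
```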
1066
935
 
1067
936
  @typing.overload
1068
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
937
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1069
938
  """
1070
- Specifies secrets to be retrieved and injected as environment variables prior to
1071
- the execution of a step.
1072
-
1073
-
1074
- Parameters
1075
- ----------
1076
- sources : List[Union[str, Dict[str, Any]]], default: []
1077
- List of secret specs, defining how the secrets are to be retrieved
1078
- role : str, optional, default: None
1079
- Role to use for fetching secrets
939
+ Decorator prototype for all step decorators. This function gets specialized
940
+ and imported for all decorator types by _import_plugin_decorators().
1080
941
  """
1081
942
  ...
1082
943
 
1083
944
  @typing.overload
1084
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
945
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1085
946
  ...
1086
947
 
1087
- @typing.overload
1088
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
948
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
949
+ """
950
+ Decorator prototype for all step decorators. This function gets specialized
951
+ and imported for all decorator types by _import_plugin_decorators().
952
+ """
1089
953
  ...
1090
954
 
1091
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
955
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1092
956
  """
1093
- Specifies secrets to be retrieved and injected as environment variables prior to
1094
- the execution of a step.
957
+ Specifies that this step should execute on DGX cloud.
1095
958
 
1096
959
 
1097
960
  Parameters
1098
961
  ----------
1099
- sources : List[Union[str, Dict[str, Any]]], default: []
1100
- List of secret specs, defining how the secrets are to be retrieved
1101
- role : str, optional, default: None
1102
- Role to use for fetching secrets
1103
- """
1104
- ...
1105
-
1106
- @typing.overload
1107
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1108
- """
1109
- A simple decorator that demonstrates using CardDecoratorInjector
1110
- to inject a card and render simple markdown content.
1111
- """
1112
- ...
1113
-
1114
- @typing.overload
1115
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1116
- ...
1117
-
1118
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1119
- """
1120
- A simple decorator that demonstrates using CardDecoratorInjector
1121
- to inject a card and render simple markdown content.
962
+ gpu : int
963
+ Number of GPUs to use.
964
+ gpu_type : str
965
+ Type of Nvidia GPU to use.
966
+ queue_timeout : int
967
+ Time to keep the job in NVCF's queue.
1122
968
  """
1123
969
  ...
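A hedged sketch of the DGX Cloud decorator above; the GPU type and queue timeout are illustrative values rather than recommendations, and the step assumes a CUDA-enabled PyTorch is present in the image.

```python
from metaflow import FlowSpec, step, nvidia


class GpuFlow(FlowSpec):

    # One GPU of an illustrative type, queued for at most an hour (3600 seconds).
    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)
    @step
    def start(self):
        import torch  # assumption: the image ships a CUDA-enabled PyTorch
        print("cuda available:", torch.cuda.is_available())
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    GpuFlow()
```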
1124
970
 
1125
- @typing.overload
1126
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
971
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1127
972
  """
1128
- Enables checkpointing for a step.
1129
-
1130
- > Examples
973
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
1131
974
 
1132
- - Saving Checkpoints
975
+ User code call
976
+ --------------
977
+ @ollama(
978
+ models=[...],
979
+ ...
980
+ )
1133
981
 
1134
- ```python
1135
- @checkpoint
1136
- @step
1137
- def train(self):
1138
- model = create_model(self.parameters, checkpoint_path = None)
1139
- for i in range(self.epochs):
1140
- # some training logic
1141
- loss = model.train(self.dataset)
1142
- if i % 10 == 0:
1143
- model.save(
1144
- current.checkpoint.directory,
1145
- )
1146
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
1147
- # and returns a reference dictionary to the checkpoint saved in the datastore
1148
- self.latest_checkpoint = current.checkpoint.save(
1149
- name="epoch_checkpoint",
1150
- metadata={
1151
- "epoch": i,
1152
- "loss": loss,
1153
- }
1154
- )
1155
- ```
982
+ Valid backend options
983
+ ---------------------
984
+ - 'local': Run as a separate process on the local task machine.
985
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
986
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1156
987
 
1157
- - Using Loaded Checkpoints
988
+ Valid model options
989
+ -------------------
990
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1158
991
 
1159
- ```python
1160
- @retry(times=3)
1161
- @checkpoint
1162
- @step
1163
- def train(self):
1164
- # Assume that the task has restarted and the previous attempt of the task
1165
- # saved a checkpoint
1166
- checkpoint_path = None
1167
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1168
- print("Loaded checkpoint from the previous attempt")
1169
- checkpoint_path = current.checkpoint.directory
1170
992
 
1171
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1172
- for i in range(self.epochs):
1173
- ...
1174
- ```
993
+ Parameters
994
+ ----------
995
+ models: list[str]
996
+ List of Ollama containers running models in sidecars.
997
+ backend: str
998
+ Determines where and how to run the Ollama process.
999
+ force_pull: bool
1000
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1001
+ cache_update_policy: str
1002
+ Cache update policy: "auto", "force", or "never".
1003
+ force_cache_update: bool
1004
+ Simple override for "force" cache update policy.
1005
+ debug: bool
1006
+ Whether to turn on verbose debugging logs.
1007
+ circuit_breaker_config: dict
1008
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1009
+ timeout_config: dict
1010
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1011
+ """
1012
+ ...
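A hedged sketch of running an Ollama sidecar with the 'local' backend. The argument values shown are assumed defaults, and calling the sidecar over Ollama's usual port 11434 with `requests` is an assumption about typical usage, not something this stub guarantees.

```python
import requests  # assumes requests is available in the step's environment

from metaflow import FlowSpec, step, ollama


class OllamaFlow(FlowSpec):

    @ollama(
        models=["llama3.2"],          # any model from https://ollama.com/search
        backend="local",              # run the sidecar on the task machine
        force_pull=False,
        cache_update_policy="auto",
        force_cache_update=False,
        debug=False,
        circuit_breaker_config={},    # fall back to built-in thresholds
        timeout_config={},            # fall back to built-in timeouts
    )
    @step
    def start(self):
        # Assumption: the local sidecar listens on Ollama's default port 11434.
        resp = requests.post(
            "http://localhost:11434/api/generate",
            json={"model": "llama3.2", "prompt": "Say hello.", "stream": False},
            timeout=120,
        )
        self.answer = resp.json().get("response", "")
        self.next(self.end)

    @step
    def end(self):
        print(self.answer)


if __name__ == "__main__":
    OllamaFlow()
```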
1013
+
1014
+ @typing.overload
1015
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1016
+ """
1017
+ Specifies environment variables to be set prior to the execution of a step.
1175
1018
 
1176
1019
 
1177
1020
  Parameters
1178
1021
  ----------
1179
- load_policy : str, default: "fresh"
1180
- The policy for loading the checkpoint. The following policies are supported:
1181
- - "eager": Loads the the latest available checkpoint within the namespace.
1182
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1183
- will be loaded at the start of the task.
1184
- - "none": Do not load any checkpoint
1185
- - "fresh": Loads the lastest checkpoint created within the running Task.
1186
- This mode helps loading checkpoints across various retry attempts of the same task.
1187
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1188
- created within the task will be loaded when the task is retries execution on failure.
1189
-
1190
- temp_dir_root : str, default: None
1191
- The root directory under which `current.checkpoint.directory` will be created.
1022
+ vars : Dict[str, str], default {}
1023
+ Dictionary of environment variables to set.
1192
1024
  """
1193
1025
  ...
1194
1026
 
1195
1027
  @typing.overload
1196
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1028
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1197
1029
  ...
1198
1030
 
1199
1031
  @typing.overload
1200
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1032
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1201
1033
  ...
1202
1034
 
1203
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
1035
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1204
1036
  """
1205
- Enables checkpointing for a step.
1037
+ Specifies environment variables to be set prior to the execution of a step.
1206
1038
 
1207
- > Examples
1208
1039
 
1209
- - Saving Checkpoints
1040
+ Parameters
1041
+ ----------
1042
+ vars : Dict[str, str], default {}
1043
+ Dictionary of environment variables to set.
1044
+ """
1045
+ ...
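A minimal sketch of the environment decorator above; the variable names and values are illustrative.

```python
import os

from metaflow import FlowSpec, step, environment


class EnvFlow(FlowSpec):

    @environment(vars={"TOKENIZERS_PARALLELISM": "false", "APP_MODE": "batch"})
    @step
    def start(self):
        # The variables are set before the step body runs.
        print(os.environ["APP_MODE"], os.environ["TOKENIZERS_PARALLELISM"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvFlow()
```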
1046
+
1047
+ @typing.overload
1048
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1049
+ """
1050
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1051
+ It exists to make it easier for users to know that this decorator should only be used with
1052
+ a Neo Cloud like CoreWeave.
1053
+ """
1054
+ ...
1055
+
1056
+ @typing.overload
1057
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1058
+ ...
1059
+
1060
+ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1061
+ """
1062
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1063
+ It exists to make it easier for users to know that this decorator should only be used with
1064
+ a Neo Cloud like CoreWeave.
1065
+ """
1066
+ ...
1067
+
1068
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1069
+ """
1070
+ This decorator is used to run vLLM APIs as Metaflow task sidecars.
1210
1071
 
1211
- ```python
1212
- @checkpoint
1213
- @step
1214
- def train(self):
1215
- model = create_model(self.parameters, checkpoint_path = None)
1216
- for i in range(self.epochs):
1217
- # some training logic
1218
- loss = model.train(self.dataset)
1219
- if i % 10 == 0:
1220
- model.save(
1221
- current.checkpoint.directory,
1222
- )
1223
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
1224
- # and returns a reference dictionary to the checkpoint saved in the datastore
1225
- self.latest_checkpoint = current.checkpoint.save(
1226
- name="epoch_checkpoint",
1227
- metadata={
1228
- "epoch": i,
1229
- "loss": loss,
1230
- }
1231
- )
1232
- ```
1072
+ User code call
1073
+ --------------
1074
+ @vllm(
1075
+ model="...",
1076
+ ...
1077
+ )
1233
1078
 
1234
- - Using Loaded Checkpoints
1079
+ Valid backend options
1080
+ ---------------------
1081
+ - 'local': Run as a separate process on the local task machine.
1235
1082
 
1236
- ```python
1237
- @retry(times=3)
1238
- @checkpoint
1239
- @step
1240
- def train(self):
1241
- # Assume that the task has restarted and the previous attempt of the task
1242
- # saved a checkpoint
1243
- checkpoint_path = None
1244
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1245
- print("Loaded checkpoint from the previous attempt")
1246
- checkpoint_path = current.checkpoint.directory
1083
+ Valid model options
1084
+ -------------------
1085
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1247
1086
 
1248
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1249
- for i in range(self.epochs):
1250
- ...
1251
- ```
1087
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1088
+ If you need multiple models, you must create multiple @vllm decorators.
1252
1089
 
1253
1090
 
1254
1091
  Parameters
1255
1092
  ----------
1256
- load_policy : str, default: "fresh"
1257
- The policy for loading the checkpoint. The following policies are supported:
1258
- - "eager": Loads the the latest available checkpoint within the namespace.
1259
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1260
- will be loaded at the start of the task.
1261
- - "none": Do not load any checkpoint
1262
- - "fresh": Loads the lastest checkpoint created within the running Task.
1263
- This mode helps loading checkpoints across various retry attempts of the same task.
1264
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1265
- created within the task will be loaded when the task is retries execution on failure.
1266
-
1267
- temp_dir_root : str, default: None
1268
- The root directory under which `current.checkpoint.directory` will be created.
1093
+ model: str
1094
+ HuggingFace model identifier to be served by vLLM.
1095
+ backend: str
1096
+ Determines where and how to run the vLLM process.
1097
+ openai_api_server: bool
1098
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1099
+ Default is False (uses native engine).
1100
+ Set to True for backward compatibility with existing code.
1101
+ debug: bool
1102
+ Whether to turn on verbose debugging logs.
1103
+ card_refresh_interval: int
1104
+ Interval in seconds for refreshing the vLLM status card.
1105
+ Only used when openai_api_server=True.
1106
+ max_retries: int
1107
+ Maximum number of retries checking for vLLM server startup.
1108
+ Only used when openai_api_server=True.
1109
+ retry_alert_frequency: int
1110
+ Frequency of alert logs for vLLM server startup retries.
1111
+ Only used when openai_api_server=True.
1112
+ engine_args : dict
1113
+ Additional keyword arguments to pass to the vLLM engine.
1114
+ For example, `tensor_parallel_size=2`.
1269
1115
  """
1270
1116
  ...
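A hedged sketch of the vLLM sidecar in OpenAI-compatible server mode. The model identifier, engine arguments, retry/refresh values, and the assumption that the server listens on vLLM's default port 8000 are all illustrative, and the `openai` client is assumed to be available in the step's environment.

```python
from metaflow import FlowSpec, step, vllm


class VllmFlow(FlowSpec):

    @vllm(
        model="meta-llama/Llama-3.2-1B",   # one model per decorator/server
        backend="local",
        openai_api_server=True,            # serve an OpenAI-compatible endpoint
        debug=False,
        card_refresh_interval=30,
        max_retries=10,
        retry_alert_frequency=5,
        engine_args={"tensor_parallel_size": 1},
    )
    @step
    def start(self):
        # Assumption: the sidecar exposes vLLM's default port 8000.
        from openai import OpenAI
        client = OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")
        out = client.completions.create(
            model="meta-llama/Llama-3.2-1B", prompt="Hello", max_tokens=8
        )
        self.text = out.choices[0].text
        self.next(self.end)

    @step
    def end(self):
        print(self.text)


if __name__ == "__main__":
    VllmFlow()
```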
1271
1117
 
@@ -1348,104 +1194,192 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
1348
1194
  """
1349
1195
  ...
1350
1196
 
1351
- @typing.overload
1352
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1197
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1353
1198
  """
1354
- Specifies the flow(s) that this flow depends on.
1199
+ Specifies that this step should execute on Kubernetes.
1355
1200
 
1356
- ```
1357
- @trigger_on_finish(flow='FooFlow')
1358
- ```
1359
- or
1360
- ```
1361
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1362
- ```
1363
- This decorator respects the @project decorator and triggers the flow
1364
- when upstream runs within the same namespace complete successfully
1365
1201
 
1366
- Additionally, you can specify project aware upstream flow dependencies
1367
- by specifying the fully qualified project_flow_name.
1368
- ```
1369
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1370
- ```
1371
- or
1372
- ```
1373
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1374
- ```
1202
+ Parameters
1203
+ ----------
1204
+ cpu : int, default 1
1205
+ Number of CPUs required for this step. If `@resources` is
1206
+ also present, the maximum value from all decorators is used.
1207
+ memory : int, default 4096
1208
+ Memory size (in MB) required for this step. If
1209
+ `@resources` is also present, the maximum value from all decorators is
1210
+ used.
1211
+ disk : int, default 10240
1212
+ Disk size (in MB) required for this step. If
1213
+ `@resources` is also present, the maximum value from all decorators is
1214
+ used.
1215
+ image : str, optional, default None
1216
+ Docker image to use when launching on Kubernetes. If not specified, and
1217
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1218
+ not, a default Docker image mapping to the current version of Python is used.
1219
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1220
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
1221
+ image_pull_secrets: List[str], default []
1222
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
1223
+ Kubernetes image pull secrets to use when pulling container images
1224
+ in Kubernetes.
1225
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1226
+ Kubernetes service account to use when launching pod in Kubernetes.
1227
+ secrets : List[str], optional, default None
1228
+ Kubernetes secrets to use when launching pod in Kubernetes. These
1229
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1230
+ in Metaflow configuration.
1231
+ node_selector: Union[Dict[str,str], str], optional, default None
1232
+ Kubernetes node selector(s) to apply to the pod running the task.
1233
+ Can be passed in as a comma separated string of values e.g.
1234
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
1235
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
1236
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1237
+ Kubernetes namespace to use when launching pod in Kubernetes.
1238
+ gpu : int, optional, default None
1239
+ Number of GPUs required for this step. A value of zero implies that
1240
+ the scheduled node should not have GPUs.
1241
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1242
+ The vendor of the GPUs to be used for this step.
1243
+ tolerations : List[Dict[str,str]], default []
1244
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1245
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1246
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
1247
+ Kubernetes labels to use when launching pod in Kubernetes.
1248
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
1249
+ Kubernetes annotations to use when launching pod in Kubernetes.
1250
+ use_tmpfs : bool, default False
1251
+ This enables an explicit tmpfs mount for this step.
1252
+ tmpfs_tempdir : bool, default True
1253
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1254
+ tmpfs_size : int, optional, default: None
1255
+ The value for the size (in MiB) of the tmpfs mount for this step.
1256
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1257
+ memory allocated for this step.
1258
+ tmpfs_path : str, optional, default /metaflow_temp
1259
+ Path to tmpfs mount for this step.
1260
+ persistent_volume_claims : Dict[str, str], optional, default None
1261
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1262
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1263
+ shared_memory: int, optional
1264
+ Shared memory size (in MiB) required for this step
1265
+ port: int, optional
1266
+ Port number to specify in the Kubernetes job object
1267
+ compute_pool : str, optional, default None
1268
+ Compute pool to be used for this step.
1269
+ If not specified, any accessible compute pool within the perimeter is used.
1270
+ hostname_resolution_timeout: int, default 10 * 60
1271
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
1272
+ Only applicable when @parallel is used.
1273
+ qos: str, default: Burstable
1274
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1275
+
1276
+ security_context: Dict[str, Any], optional, default None
1277
+ Container security context. Applies to the task container. Allows the following keys:
1278
+ - privileged: bool, optional, default None
1279
+ - allow_privilege_escalation: bool, optional, default None
1280
+ - run_as_user: int, optional, default None
1281
+ - run_as_group: int, optional, default None
1282
+ - run_as_non_root: bool, optional, default None
1283
+ """
1284
+ ...
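A hedged sketch combining a few of the Kubernetes options documented above with `@resources`; the node selector, tolerations, and resource sizes are placeholders for whatever the target cluster actually provides.

```python
from metaflow import FlowSpec, step, kubernetes, resources


class K8sFlow(FlowSpec):

    # When both decorators are present, the larger of each resource value wins.
    @resources(cpu=2, memory=8192)
    @kubernetes(
        cpu=1,
        memory=4096,
        disk=20480,
        node_selector={"kubernetes.io/arch": "amd64"},   # placeholder selector
        tolerations=[{"key": "dedicated", "operator": "Exists"}],
        use_tmpfs=True,
        tmpfs_size=2048,            # MiB; defaults to 50% of memory if omitted
        qos="Burstable",
    )
    @step
    def start(self):
        self.result = sum(range(1_000_000))
        self.next(self.end)

    @step
    def end(self):
        print(self.result)


if __name__ == "__main__":
    K8sFlow()
```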
1285
+
1286
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1287
+ """
1288
+ Specifies that this step should execute on DGX cloud.
1375
1289
 
1376
- You can also specify just the project or project branch (other values will be
1377
- inferred from the current project or project branch):
1378
- ```
1379
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1380
- ```
1381
1290
 
1382
- Note that `branch` is typically one of:
1383
- - `prod`
1384
- - `user.bob`
1385
- - `test.my_experiment`
1386
- - `prod.staging`
1291
+ Parameters
1292
+ ----------
1293
+ gpu : int
1294
+ Number of GPUs to use.
1295
+ gpu_type : str
1296
+ Type of Nvidia GPU to use.
1297
+ """
1298
+ ...
1299
+
1300
+ @typing.overload
1301
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1302
+ """
1303
+ Specifies that the step will succeed under all circumstances.
1304
+
1305
+ The decorator will create an optional artifact, specified by `var`, which
1306
+ contains the exception raised. You can use it to detect the presence
1307
+ of errors, indicating that all happy-path artifacts produced by the step
1308
+ are missing.
1387
1309
 
1388
1310
 
1389
1311
  Parameters
1390
1312
  ----------
1391
- flow : Union[str, Dict[str, str]], optional, default None
1392
- Upstream flow dependency for this flow.
1393
- flows : List[Union[str, Dict[str, str]]], default []
1394
- Upstream flow dependencies for this flow.
1395
- options : Dict[str, Any], default {}
1396
- Backend-specific configuration for tuning eventing behavior.
1313
+ var : str, optional, default None
1314
+ Name of the artifact in which to store the caught exception.
1315
+ If not specified, the exception is not stored.
1316
+ print_exception : bool, default True
1317
+ Determines whether or not the exception is printed to
1318
+ stdout when caught.
1397
1319
  """
1398
1320
  ...
1399
1321
 
1400
1322
  @typing.overload
1401
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1323
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1402
1324
  ...
1403
1325
 
1404
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1326
+ @typing.overload
1327
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1328
+ ...
1329
+
1330
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1405
1331
  """
1406
- Specifies the flow(s) that this flow depends on.
1332
+ Specifies that the step will succeed under all circumstances.
1407
1333
 
1408
- ```
1409
- @trigger_on_finish(flow='FooFlow')
1410
- ```
1411
- or
1412
- ```
1413
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1414
- ```
1415
- This decorator respects the @project decorator and triggers the flow
1416
- when upstream runs within the same namespace complete successfully
1334
+ The decorator will create an optional artifact, specified by `var`, which
1335
+ contains the exception raised. You can use it to detect the presence
1336
+ of errors, indicating that all happy-path artifacts produced by the step
1337
+ are missing.
1417
1338
 
1418
- Additionally, you can specify project aware upstream flow dependencies
1419
- by specifying the fully qualified project_flow_name.
1420
- ```
1421
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1422
- ```
1423
- or
1424
- ```
1425
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1426
- ```
1427
1339
 
1428
- You can also specify just the project or project branch (other values will be
1429
- inferred from the current project or project branch):
1430
- ```
1431
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1432
- ```
1340
+ Parameters
1341
+ ----------
1342
+ var : str, optional, default None
1343
+ Name of the artifact in which to store the caught exception.
1344
+ If not specified, the exception is not stored.
1345
+ print_exception : bool, default True
1346
+ Determines whether or not the exception is printed to
1347
+ stdout when caught.
1348
+ """
1349
+ ...
1350
+
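A small sketch of the behaviour documented above: the exception raised in a step is stored in the artifact named by `var`, and a later step can inspect it (step and artifact names are illustrative):

```python
from metaflow import FlowSpec, step, catch

class CatchDemoFlow(FlowSpec):

    # Any exception raised here is caught and stored in self.compute_failed,
    # so the run continues even though the happy-path artifact is missing.
    @catch(var="compute_failed")
    @step
    def start(self):
        self.result = 1 // 0  # raises ZeroDivisionError
        self.next(self.end)

    @step
    def end(self):
        failure = getattr(self, "compute_failed", None)
        if failure:
            print("start step failed:", failure)


if __name__ == "__main__":
    CatchDemoFlow()
```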
1351
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1352
+ """
1353
+ Specifies what flows belong to the same project.
1433
1354
 
1434
- Note that `branch` is typically one of:
1435
- - `prod`
1436
- - `user.bob`
1437
- - `test.my_experiment`
1438
- - `prod.staging`
1355
+ A project-specific namespace is created for all flows that
1356
+ use the same `@project(name)`.
1439
1357
 
1440
1358
 
1441
1359
  Parameters
1442
1360
  ----------
1443
- flow : Union[str, Dict[str, str]], optional, default None
1444
- Upstream flow dependency for this flow.
1445
- flows : List[Union[str, Dict[str, str]]], default []
1446
- Upstream flow dependencies for this flow.
1447
- options : Dict[str, Any], default {}
1448
- Backend-specific configuration for tuning eventing behavior.
1361
+ name : str
1362
+ Project name. Make sure that the name is unique amongst all
1363
+ projects that use the same production scheduler. The name may
1364
+ contain only lowercase alphanumeric characters and underscores.
1365
+
1366
+ branch : Optional[str], default None
1367
+ The branch to use. If not specified, the branch is set to
1368
+ `user.<username>` unless `production` is set to `True`. This can
1369
+ also be set on the command line using `--branch` as a top-level option.
1370
+ It is an error to specify `branch` in the decorator and on the command line.
1371
+
1372
+ production : bool, default False
1373
+ Whether or not the branch is the production branch. This can also be set on the
1374
+ command line using `--production` as a top-level option. It is an error to specify
1375
+ `production` in the decorator and on the command line.
1376
+ The project branch name will be:
1377
+ - if `branch` is specified:
1378
+ - if `production` is True: `prod.<branch>`
1379
+ - if `production` is False: `test.<branch>`
1380
+ - if `branch` is not specified:
1381
+ - if `production` is True: `prod`
1382
+ - if `production` is False: `user.<username>`
1449
1383
  """
1450
1384
  ...
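Following the branch rules above, the flow sketched below (the project name is illustrative) runs under `user.<username>` by default, under `test.variant_a` when deployed with `--branch variant_a`, and under `prod` when deployed with `--production`:

```python
from metaflow import FlowSpec, project, step

@project(name="fraud_detection")  # illustrative project name
class TrainingFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TrainingFlow()
```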
1451
1385
 
@@ -1542,6 +1476,49 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1542
1476
  """
1543
1477
  ...
1544
1478
 
1479
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1480
+ """
1481
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1482
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1483
+
1484
+
1485
+ Parameters
1486
+ ----------
1487
+ timeout : int
1488
+ Time, in seconds before the task times out and fails. (Default: 3600)
1489
+ poke_interval : int
1490
+ Time in seconds that the job should wait in between each try. (Default: 60)
1491
+ mode : str
1492
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1493
+ exponential_backoff : bool
1494
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1495
+ pool : str
1496
+ The slot pool this task should run in;
1497
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1498
+ soft_fail : bool
1499
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1500
+ name : str
1501
+ Name of the sensor on Airflow
1502
+ description : str
1503
+ Description of sensor in the Airflow UI
1504
+ external_dag_id : str
1505
+ The dag_id that contains the task you want to wait for.
1506
+ external_task_ids : List[str]
1507
+ The list of task_ids that you want to wait for.
1508
+ If None (default value) the sensor waits for the DAG. (Default: None)
1509
+ allowed_states : List[str]
1510
+ Iterable of allowed states. (Default: ['success'])
1511
+ failed_states : List[str]
1512
+ Iterable of failed or disallowed states. (Default: None)
1513
+ execution_delta : datetime.timedelta
1514
+ time difference with the previous execution to look at,
1515
+ the default is the same logical date as the current task or DAG. (Default: None)
1516
+ check_existence: bool
1517
+ Set to True to check if the external task exists or check if
1518
+ the DAG to wait for exists. (Default: True)
1519
+ """
1520
+ ...
1521
+
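A sketch of attaching this sensor to a flow; the DAG and task ids are placeholders, and arguments not shown fall back to the defaults listed above:

```python
from metaflow import FlowSpec, step, airflow_external_task_sensor

# Wait for task `export_table` of `upstream_dag` (placeholder names)
# before the `start` step runs, when deployed via `airflow create`.
@airflow_external_task_sensor(
    name="wait_for_export",
    external_dag_id="upstream_dag",
    external_task_ids=["export_table"],
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```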
1545
1522
  def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1546
1523
  """
1547
1524
  Allows setting external datastores to save data for the
@@ -1641,18 +1618,112 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1641
1618
  Parameters:
1642
1619
  ----------
1643
1620
 
1644
- type: str
1645
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1621
+ type: str
1622
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1623
+
1624
+ config: dict or Callable
1625
+ Dictionary of configuration options for the datastore. The following keys are required:
1626
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1627
+ - example: 's3://bucket-name/path/to/root'
1628
+ - example: 'gs://bucket-name/path/to/root'
1629
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1630
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1631
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1632
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1633
+ """
1634
+ ...
1635
+
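A sketch of the configuration shape described above, assuming the decorator is applied with the `type` and `config` arguments documented here; the bucket path and IAM role are placeholders:

```python
from metaflow import FlowSpec, step, with_artifact_store

# Placeholder root and role; `root` must be in the format the datastore expects.
@with_artifact_store(
    type="s3",
    config={
        "root": "s3://bucket-name/path/to/root",
        "role_arn": "arn:aws:iam::123456789012:role/placeholder-role",
    },
)
class ExternalStoreFlow(FlowSpec):

    @step
    def start(self):
        self.blob = "stored via the configured artifact store"
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ExternalStoreFlow()
```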
1636
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1637
+ """
1638
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1639
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1640
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1641
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
1642
+ starts only after all sensors finish.
1643
+
1644
+
1645
+ Parameters
1646
+ ----------
1647
+ timeout : int
1648
+ Time, in seconds before the task times out and fails. (Default: 3600)
1649
+ poke_interval : int
1650
+ Time in seconds that the job should wait in between each try. (Default: 60)
1651
+ mode : str
1652
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1653
+ exponential_backoff : bool
1654
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1655
+ pool : str
1656
+ The slot pool this task should run in;
1657
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1658
+ soft_fail : bool
1659
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1660
+ name : str
1661
+ Name of the sensor on Airflow
1662
+ description : str
1663
+ Description of sensor in the Airflow UI
1664
+ bucket_key : Union[str, List[str]]
1665
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1666
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1667
+ bucket_name : str
1668
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1669
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1670
+ wildcard_match : bool
1671
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1672
+ aws_conn_id : str
1673
+ a reference to the s3 connection on Airflow. (Default: None)
1674
+ verify : bool
1675
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1676
+ """
1677
+ ...
1678
+
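A sketch of the S3 sensor; the key below is a placeholder, and because it is a full s3:// URL, `bucket_name` is left unset as the docstring advises:

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor

# Block the `start` step until the (placeholder) key exists in S3.
@airflow_s3_key_sensor(
    name="wait_for_input_file",
    bucket_key="s3://bucket-name/path/to/input.csv",
)
class S3TriggeredFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3TriggeredFlow()
```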
1679
+ @typing.overload
1680
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1681
+ """
1682
+ Specifies the Conda environment for all steps of the flow.
1683
+
1684
+ Use `@conda_base` to set common libraries required by all
1685
+ steps and use `@conda` to specify step-specific additions.
1686
+
1687
+
1688
+ Parameters
1689
+ ----------
1690
+ packages : Dict[str, str], default {}
1691
+ Packages to use for this flow. The key is the name of the package
1692
+ and the value is the version to use.
1693
+ libraries : Dict[str, str], default {}
1694
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1695
+ python : str, optional, default None
1696
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1697
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1698
+ disabled : bool, default False
1699
+ If set to True, disables Conda.
1700
+ """
1701
+ ...
1702
+
1703
+ @typing.overload
1704
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1705
+ ...
1706
+
1707
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1708
+ """
1709
+ Specifies the Conda environment for all steps of the flow.
1710
+
1711
+ Use `@conda_base` to set common libraries required by all
1712
+ steps and use `@conda` to specify step-specific additions.
1646
1713
 
1647
- config: dict or Callable
1648
- Dictionary of configuration options for the datastore. The following keys are required:
1649
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1650
- - example: 's3://bucket-name/path/to/root'
1651
- - example: 'gs://bucket-name/path/to/root'
1652
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1653
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1654
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1655
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1714
+
1715
+ Parameters
1716
+ ----------
1717
+ packages : Dict[str, str], default {}
1718
+ Packages to use for this flow. The key is the name of the package
1719
+ and the value is the version to use.
1720
+ libraries : Dict[str, str], default {}
1721
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1722
+ python : str, optional, default None
1723
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1724
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1725
+ disabled : bool, default False
1726
+ If set to True, disables Conda.
1656
1727
  """
1657
1728
  ...
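A sketch combining the flow-level decorator above with a step-level `@conda` addition, as the docstring suggests; the Python version and package pins are illustrative:

```python
from metaflow import FlowSpec, conda, conda_base, step

# Shared base environment for every step, plus a step-specific addition.
@conda_base(python="3.10.4", packages={"pandas": "2.1.4"})
class CondaDemoFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd
        self.rows = pd.DataFrame({"x": [1, 2, 3]}).shape[0]
        self.next(self.train)

    # scikit-learn is needed only in this step.
    @conda(packages={"scikit-learn": "1.4.0"})
    @step
    def train(self):
        import sklearn  # available here because @conda adds it
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaDemoFlow()
```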
1658
1729
 
@@ -1749,174 +1820,103 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1749
1820
  ...
1750
1821
 
1751
1822
  @typing.overload
1752
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1823
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1753
1824
  """
1754
- Specifies the Conda environment for all steps of the flow.
1825
+ Specifies the flow(s) that this flow depends on.
1755
1826
 
1756
- Use `@conda_base` to set common libraries required by all
1757
- steps and use `@conda` to specify step-specific additions.
1827
+ ```
1828
+ @trigger_on_finish(flow='FooFlow')
1829
+ ```
1830
+ or
1831
+ ```
1832
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1833
+ ```
1834
+ This decorator respects the @project decorator and triggers the flow
1835
+ when upstream runs within the same namespace complete successfully
1758
1836
 
1837
+ Additionally, you can specify project aware upstream flow dependencies
1838
+ by specifying the fully qualified project_flow_name.
1839
+ ```
1840
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1841
+ ```
1842
+ or
1843
+ ```
1844
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1845
+ ```
1759
1846
 
1760
- Parameters
1761
- ----------
1762
- packages : Dict[str, str], default {}
1763
- Packages to use for this flow. The key is the name of the package
1764
- and the value is the version to use.
1765
- libraries : Dict[str, str], default {}
1766
- Supported for backward compatibility. When used with packages, packages will take precedence.
1767
- python : str, optional, default None
1768
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1769
- that the version used will correspond to the version of the Python interpreter used to start the run.
1770
- disabled : bool, default False
1771
- If set to True, disables Conda.
1772
- """
1773
- ...
1774
-
1775
- @typing.overload
1776
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1777
- ...
1778
-
1779
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1780
- """
1781
- Specifies the Conda environment for all steps of the flow.
1847
+ You can also specify just the project or project branch (other values will be
1848
+ inferred from the current project or project branch):
1849
+ ```
1850
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1851
+ ```
1782
1852
 
1783
- Use `@conda_base` to set common libraries required by all
1784
- steps and use `@conda` to specify step-specific additions.
1853
+ Note that `branch` is typically one of:
1854
+ - `prod`
1855
+ - `user.bob`
1856
+ - `test.my_experiment`
1857
+ - `prod.staging`
1785
1858
 
1786
1859
 
1787
1860
  Parameters
1788
1861
  ----------
1789
- packages : Dict[str, str], default {}
1790
- Packages to use for this flow. The key is the name of the package
1791
- and the value is the version to use.
1792
- libraries : Dict[str, str], default {}
1793
- Supported for backward compatibility. When used with packages, packages will take precedence.
1794
- python : str, optional, default None
1795
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1796
- that the version used will correspond to the version of the Python interpreter used to start the run.
1797
- disabled : bool, default False
1798
- If set to True, disables Conda.
1862
+ flow : Union[str, Dict[str, str]], optional, default None
1863
+ Upstream flow dependency for this flow.
1864
+ flows : List[Union[str, Dict[str, str]]], default []
1865
+ Upstream flow dependencies for this flow.
1866
+ options : Dict[str, Any], default {}
1867
+ Backend-specific configuration for tuning eventing behavior.
1799
1868
  """
1800
1869
  ...
1801
1870
 
1802
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1803
- """
1804
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1805
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1806
-
1807
-
1808
- Parameters
1809
- ----------
1810
- timeout : int
1811
- Time, in seconds before the task times out and fails. (Default: 3600)
1812
- poke_interval : int
1813
- Time in seconds that the job should wait in between each try. (Default: 60)
1814
- mode : str
1815
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1816
- exponential_backoff : bool
1817
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1818
- pool : str
1819
- the slot pool this task should run in,
1820
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1821
- soft_fail : bool
1822
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1823
- name : str
1824
- Name of the sensor on Airflow
1825
- description : str
1826
- Description of sensor in the Airflow UI
1827
- external_dag_id : str
1828
- The dag_id that contains the task you want to wait for.
1829
- external_task_ids : List[str]
1830
- The list of task_ids that you want to wait for.
1831
- If None (default value) the sensor waits for the DAG. (Default: None)
1832
- allowed_states : List[str]
1833
- Iterable of allowed states, (Default: ['success'])
1834
- failed_states : List[str]
1835
- Iterable of failed or dis-allowed states. (Default: None)
1836
- execution_delta : datetime.timedelta
1837
- time difference with the previous execution to look at,
1838
- the default is the same logical date as the current task or DAG. (Default: None)
1839
- check_existence: bool
1840
- Set to True to check if the external task exists or check if
1841
- the DAG to wait for exists. (Default: True)
1842
- """
1871
+ @typing.overload
1872
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1843
1873
  ...
1844
1874
 
1845
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1875
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1846
1876
  """
1847
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1848
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1849
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1850
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1851
- starts only after all sensors finish.
1877
+ Specifies the flow(s) that this flow depends on.
1852
1878
 
1879
+ ```
1880
+ @trigger_on_finish(flow='FooFlow')
1881
+ ```
1882
+ or
1883
+ ```
1884
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1885
+ ```
1886
+ This decorator respects the @project decorator and triggers the flow
1887
+ when upstream runs within the same namespace complete successfully
1853
1888
 
1854
- Parameters
1855
- ----------
1856
- timeout : int
1857
- Time, in seconds before the task times out and fails. (Default: 3600)
1858
- poke_interval : int
1859
- Time in seconds that the job should wait in between each try. (Default: 60)
1860
- mode : str
1861
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1862
- exponential_backoff : bool
1863
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1864
- pool : str
1865
- the slot pool this task should run in,
1866
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1867
- soft_fail : bool
1868
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1869
- name : str
1870
- Name of the sensor on Airflow
1871
- description : str
1872
- Description of sensor in the Airflow UI
1873
- bucket_key : Union[str, List[str]]
1874
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1875
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1876
- bucket_name : str
1877
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1878
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1879
- wildcard_match : bool
1880
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1881
- aws_conn_id : str
1882
- a reference to the s3 connection on Airflow. (Default: None)
1883
- verify : bool
1884
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1885
- """
1886
- ...
1887
-
1888
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1889
- """
1890
- Specifies what flows belong to the same project.
1889
+ Additionally, you can specify project aware upstream flow dependencies
1890
+ by specifying the fully qualified project_flow_name.
1891
+ ```
1892
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1893
+ ```
1894
+ or
1895
+ ```
1896
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1897
+ ```
1891
1898
 
1892
- A project-specific namespace is created for all flows that
1893
- use the same `@project(name)`.
1899
+ You can also specify just the project or project branch (other values will be
1900
+ inferred from the current project or project branch):
1901
+ ```
1902
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1903
+ ```
1904
+
1905
+ Note that `branch` is typically one of:
1906
+ - `prod`
1907
+ - `user.bob`
1908
+ - `test.my_experiment`
1909
+ - `prod.staging`
1894
1910
 
1895
1911
 
1896
1912
  Parameters
1897
1913
  ----------
1898
- name : str
1899
- Project name. Make sure that the name is unique amongst all
1900
- projects that use the same production scheduler. The name may
1901
- contain only lowercase alphanumeric characters and underscores.
1902
-
1903
- branch : Optional[str], default None
1904
- The branch to use. If not specified, the branch is set to
1905
- `user.<username>` unless `production` is set to `True`. This can
1906
- also be set on the command line using `--branch` as a top-level option.
1907
- It is an error to specify `branch` in the decorator and on the command line.
1908
-
1909
- production : bool, default False
1910
- Whether or not the branch is the production branch. This can also be set on the
1911
- command line using `--production` as a top-level option. It is an error to specify
1912
- `production` in the decorator and on the command line.
1913
- The project branch name will be:
1914
- - if `branch` is specified:
1915
- - if `production` is True: `prod.<branch>`
1916
- - if `production` is False: `test.<branch>`
1917
- - if `branch` is not specified:
1918
- - if `production` is True: `prod`
1919
- - if `production` is False: `user.<username>`
1914
+ flow : Union[str, Dict[str, str]], optional, default None
1915
+ Upstream flow dependency for this flow.
1916
+ flows : List[Union[str, Dict[str, str]]], default []
1917
+ Upstream flow dependencies for this flow.
1918
+ options : Dict[str, Any], default {}
1919
+ Backend-specific configuration for tuning eventing behavior.
1920
1920
  """
1921
1921
  ...
1922
1922