ob-metaflow-stubs 6.0.9.4__py2.py3-none-any.whl → 6.0.10.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (262) hide show
  1. metaflow-stubs/__init__.pyi +986 -986
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +6 -2
  20. metaflow-stubs/metaflow_current.pyi +49 -49
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +3 -3
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +3 -3
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +6 -4
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +4 -4
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +5 -4
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +6 -3
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +4 -4
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +3 -3
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +3 -3
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  116. metaflow-stubs/multicore_utils.pyi +2 -2
  117. metaflow-stubs/ob_internal.pyi +2 -2
  118. metaflow-stubs/packaging_sys/__init__.pyi +7 -7
  119. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +4 -4
  122. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  123. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  124. metaflow-stubs/parameters.pyi +3 -3
  125. metaflow-stubs/plugins/__init__.pyi +11 -11
  126. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  128. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  134. metaflow-stubs/plugins/argo/argo_client.pyi +6 -4
  135. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +10 -3
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  141. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  142. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +4 -4
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +3 -3
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +6 -3
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  157. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  164. metaflow-stubs/plugins/cards/__init__.pyi +6 -6
  165. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  166. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  178. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  179. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  180. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  181. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  186. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  187. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  188. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  194. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  207. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  208. metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
  209. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  210. metaflow-stubs/plugins/perimeters.pyi +2 -2
  211. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  213. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  214. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  215. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  218. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  219. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  220. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  222. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  223. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  224. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  227. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  228. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  229. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  230. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  231. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  233. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  234. metaflow-stubs/profilers/__init__.pyi +2 -2
  235. metaflow-stubs/pylint_wrapper.pyi +2 -2
  236. metaflow-stubs/runner/__init__.pyi +2 -2
  237. metaflow-stubs/runner/deployer.pyi +33 -33
  238. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  239. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  240. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  241. metaflow-stubs/runner/nbrun.pyi +2 -2
  242. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  243. metaflow-stubs/runner/utils.pyi +3 -3
  244. metaflow-stubs/system/__init__.pyi +2 -2
  245. metaflow-stubs/system/system_logger.pyi +2 -2
  246. metaflow-stubs/system/system_monitor.pyi +2 -2
  247. metaflow-stubs/tagging_util.pyi +2 -2
  248. metaflow-stubs/tuple_util.pyi +2 -2
  249. metaflow-stubs/user_configs/__init__.pyi +2 -2
  250. metaflow-stubs/user_configs/config_options.pyi +2 -2
  251. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  252. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  253. metaflow-stubs/user_decorators/common.pyi +2 -2
  254. metaflow-stubs/user_decorators/mutable_flow.pyi +6 -6
  255. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  256. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  257. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  258. {ob_metaflow_stubs-6.0.9.4.dist-info → ob_metaflow_stubs-6.0.10.1.dist-info}/METADATA +1 -1
  259. ob_metaflow_stubs-6.0.10.1.dist-info/RECORD +262 -0
  260. ob_metaflow_stubs-6.0.9.4.dist-info/RECORD +0 -262
  261. {ob_metaflow_stubs-6.0.9.4.dist-info → ob_metaflow_stubs-6.0.10.1.dist-info}/WHEEL +0 -0
  262. {ob_metaflow_stubs-6.0.9.4.dist-info → ob_metaflow_stubs-6.0.10.1.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.18.2.1+obcheckpoint(0.2.4);ob(v1) #
4
- # Generated on 2025-09-03T10:45:51.965005 #
3
+ # MF version: 2.18.3.2+obcheckpoint(0.2.4);ob(v1) #
4
+ # Generated on 2025-09-09T09:20:35.730188 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import datetime
12
11
  import typing
12
+ import datetime
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -39,8 +39,8 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
39
39
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
40
40
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
41
41
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
42
- from . import tuple_util as tuple_util
43
42
  from . import cards as cards
43
+ from . import tuple_util as tuple_util
44
44
  from . import metaflow_git as metaflow_git
45
45
  from . import events as events
46
46
  from . import runner as runner
@@ -167,71 +167,53 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
167
167
  """
168
168
  ...
169
169
 
170
- @typing.overload
171
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
172
- """
173
- Decorator prototype for all step decorators. This function gets specialized
174
- and imported for all decorators types by _import_plugin_decorators().
175
- """
176
- ...
177
-
178
- @typing.overload
179
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
180
- ...
181
-
182
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
183
- """
184
- Decorator prototype for all step decorators. This function gets specialized
185
- and imported for all decorators types by _import_plugin_decorators().
186
- """
187
- ...
188
-
189
- @typing.overload
190
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
170
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
191
171
  """
192
- Creates a human-readable report, a Metaflow Card, after this step completes.
172
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
193
173
 
194
- Note that you may add multiple `@card` decorators in a step with different parameters.
174
+ User code call
175
+ --------------
176
+ @vllm(
177
+ model="...",
178
+ ...
179
+ )
195
180
 
181
+ Valid backend options
182
+ ---------------------
183
+ - 'local': Run as a separate process on the local task machine.
196
184
 
197
- Parameters
198
- ----------
199
- type : str, default 'default'
200
- Card type.
201
- id : str, optional, default None
202
- If multiple cards are present, use this id to identify this card.
203
- options : Dict[str, Any], default {}
204
- Options passed to the card. The contents depend on the card type.
205
- timeout : int, default 45
206
- Interrupt reporting if it takes more than this many seconds.
207
- """
208
- ...
209
-
210
- @typing.overload
211
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
212
- ...
213
-
214
- @typing.overload
215
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
216
- ...
217
-
218
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
219
- """
220
- Creates a human-readable report, a Metaflow Card, after this step completes.
185
+ Valid model options
186
+ -------------------
187
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
221
188
 
222
- Note that you may add multiple `@card` decorators in a step with different parameters.
189
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
190
+ If you need multiple models, you must create multiple @vllm decorators.
223
191
 
224
192
 
225
193
  Parameters
226
194
  ----------
227
- type : str, default 'default'
228
- Card type.
229
- id : str, optional, default None
230
- If multiple cards are present, use this id to identify this card.
231
- options : Dict[str, Any], default {}
232
- Options passed to the card. The contents depend on the card type.
233
- timeout : int, default 45
234
- Interrupt reporting if it takes more than this many seconds.
195
+ model: str
196
+ HuggingFace model identifier to be served by vLLM.
197
+ backend: str
198
+ Determines where and how to run the vLLM process.
199
+ openai_api_server: bool
200
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
201
+ Default is False (uses native engine).
202
+ Set to True for backward compatibility with existing code.
203
+ debug: bool
204
+ Whether to turn on verbose debugging logs.
205
+ card_refresh_interval: int
206
+ Interval in seconds for refreshing the vLLM status card.
207
+ Only used when openai_api_server=True.
208
+ max_retries: int
209
+ Maximum number of retries checking for vLLM server startup.
210
+ Only used when openai_api_server=True.
211
+ retry_alert_frequency: int
212
+ Frequency of alert logs for vLLM server startup retries.
213
+ Only used when openai_api_server=True.
214
+ engine_args : dict
215
+ Additional keyword arguments to pass to the vLLM engine.
216
+ For example, `tensor_parallel_size=2`.
235
217
  """
236
218
  ...
237
219
 
@@ -286,153 +268,6 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
286
268
  """
287
269
  ...
288
270
 
289
- @typing.overload
290
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
291
- """
292
- Enables checkpointing for a step.
293
-
294
- > Examples
295
-
296
- - Saving Checkpoints
297
-
298
- ```python
299
- @checkpoint
300
- @step
301
- def train(self):
302
- model = create_model(self.parameters, checkpoint_path = None)
303
- for i in range(self.epochs):
304
- # some training logic
305
- loss = model.train(self.dataset)
306
- if i % 10 == 0:
307
- model.save(
308
- current.checkpoint.directory,
309
- )
310
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
311
- # and returns a reference dictionary to the checkpoint saved in the datastore
312
- self.latest_checkpoint = current.checkpoint.save(
313
- name="epoch_checkpoint",
314
- metadata={
315
- "epoch": i,
316
- "loss": loss,
317
- }
318
- )
319
- ```
320
-
321
- - Using Loaded Checkpoints
322
-
323
- ```python
324
- @retry(times=3)
325
- @checkpoint
326
- @step
327
- def train(self):
328
- # Assume that the task has restarted and the previous attempt of the task
329
- # saved a checkpoint
330
- checkpoint_path = None
331
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
332
- print("Loaded checkpoint from the previous attempt")
333
- checkpoint_path = current.checkpoint.directory
334
-
335
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
336
- for i in range(self.epochs):
337
- ...
338
- ```
339
-
340
-
341
- Parameters
342
- ----------
343
- load_policy : str, default: "fresh"
344
- The policy for loading the checkpoint. The following policies are supported:
345
- - "eager": Loads the the latest available checkpoint within the namespace.
346
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
347
- will be loaded at the start of the task.
348
- - "none": Do not load any checkpoint
349
- - "fresh": Loads the lastest checkpoint created within the running Task.
350
- This mode helps loading checkpoints across various retry attempts of the same task.
351
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
352
- created within the task will be loaded when the task is retries execution on failure.
353
-
354
- temp_dir_root : str, default: None
355
- The root directory under which `current.checkpoint.directory` will be created.
356
- """
357
- ...
358
-
359
- @typing.overload
360
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
361
- ...
362
-
363
- @typing.overload
364
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
365
- ...
366
-
367
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
368
- """
369
- Enables checkpointing for a step.
370
-
371
- > Examples
372
-
373
- - Saving Checkpoints
374
-
375
- ```python
376
- @checkpoint
377
- @step
378
- def train(self):
379
- model = create_model(self.parameters, checkpoint_path = None)
380
- for i in range(self.epochs):
381
- # some training logic
382
- loss = model.train(self.dataset)
383
- if i % 10 == 0:
384
- model.save(
385
- current.checkpoint.directory,
386
- )
387
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
388
- # and returns a reference dictionary to the checkpoint saved in the datastore
389
- self.latest_checkpoint = current.checkpoint.save(
390
- name="epoch_checkpoint",
391
- metadata={
392
- "epoch": i,
393
- "loss": loss,
394
- }
395
- )
396
- ```
397
-
398
- - Using Loaded Checkpoints
399
-
400
- ```python
401
- @retry(times=3)
402
- @checkpoint
403
- @step
404
- def train(self):
405
- # Assume that the task has restarted and the previous attempt of the task
406
- # saved a checkpoint
407
- checkpoint_path = None
408
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
409
- print("Loaded checkpoint from the previous attempt")
410
- checkpoint_path = current.checkpoint.directory
411
-
412
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
413
- for i in range(self.epochs):
414
- ...
415
- ```
416
-
417
-
418
- Parameters
419
- ----------
420
- load_policy : str, default: "fresh"
421
- The policy for loading the checkpoint. The following policies are supported:
422
- - "eager": Loads the the latest available checkpoint within the namespace.
423
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
424
- will be loaded at the start of the task.
425
- - "none": Do not load any checkpoint
426
- - "fresh": Loads the lastest checkpoint created within the running Task.
427
- This mode helps loading checkpoints across various retry attempts of the same task.
428
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
429
- created within the task will be loaded when the task is retries execution on failure.
430
-
431
- temp_dir_root : str, default: None
432
- The root directory under which `current.checkpoint.directory` will be created.
433
- """
434
- ...
435
-
436
271
  def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
437
272
  """
438
273
  Specifies that this step should execute on DGX cloud.
@@ -449,170 +284,92 @@ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[t
449
284
  """
450
285
  ...
451
286
 
452
- @typing.overload
453
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
287
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
454
288
  """
455
- Specifies the PyPI packages for the step.
456
-
457
- Information in this decorator will augment any
458
- attributes set in the `@pyi_base` flow-level decorator. Hence,
459
- you can use `@pypi_base` to set packages required by all
460
- steps and use `@pypi` to specify step-specific overrides.
289
+ Specifies that this step should execute on Kubernetes.
461
290
 
462
291
 
463
292
  Parameters
464
293
  ----------
465
- packages : Dict[str, str], default: {}
466
- Packages to use for this step. The key is the name of the package
467
- and the value is the version to use.
468
- python : str, optional, default: None
469
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
470
- that the version used will correspond to the version of the Python interpreter used to start the run.
471
- """
472
- ...
473
-
474
- @typing.overload
475
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
476
- ...
477
-
478
- @typing.overload
479
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
480
- ...
481
-
482
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
483
- """
484
- Specifies the PyPI packages for the step.
485
-
486
- Information in this decorator will augment any
487
- attributes set in the `@pyi_base` flow-level decorator. Hence,
488
- you can use `@pypi_base` to set packages required by all
489
- steps and use `@pypi` to specify step-specific overrides.
490
-
491
-
492
- Parameters
493
- ----------
494
- packages : Dict[str, str], default: {}
495
- Packages to use for this step. The key is the name of the package
496
- and the value is the version to use.
497
- python : str, optional, default: None
498
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
499
- that the version used will correspond to the version of the Python interpreter used to start the run.
500
- """
501
- ...
502
-
503
- @typing.overload
504
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
505
- """
506
- Decorator prototype for all step decorators. This function gets specialized
507
- and imported for all decorators types by _import_plugin_decorators().
508
- """
509
- ...
510
-
511
- @typing.overload
512
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
513
- ...
514
-
515
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
516
- """
517
- Decorator prototype for all step decorators. This function gets specialized
518
- and imported for all decorators types by _import_plugin_decorators().
519
- """
520
- ...
521
-
522
- @typing.overload
523
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
524
- """
525
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
526
- It exists to make it easier for users to know that this decorator should only be used with
527
- a Neo Cloud like CoreWeave.
528
- """
529
- ...
530
-
531
- @typing.overload
532
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
533
- ...
534
-
535
- def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
536
- """
537
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
538
- It exists to make it easier for users to know that this decorator should only be used with
539
- a Neo Cloud like CoreWeave.
540
- """
541
- ...
542
-
543
- @typing.overload
544
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
545
- """
546
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
547
- It exists to make it easier for users to know that this decorator should only be used with
548
- a Neo Cloud like Nebius.
549
- """
550
- ...
551
-
552
- @typing.overload
553
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
554
- ...
555
-
556
- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
557
- """
558
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
559
- It exists to make it easier for users to know that this decorator should only be used with
560
- a Neo Cloud like Nebius.
561
- """
562
- ...
563
-
564
- @typing.overload
565
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
566
- """
567
- Specifies the number of times the task corresponding
568
- to a step needs to be retried.
569
-
570
- This decorator is useful for handling transient errors, such as networking issues.
571
- If your task contains operations that can't be retried safely, e.g. database updates,
572
- it is advisable to annotate it with `@retry(times=0)`.
573
-
574
- This can be used in conjunction with the `@catch` decorator. The `@catch`
575
- decorator will execute a no-op task after all retries have been exhausted,
576
- ensuring that the flow execution can continue.
577
-
578
-
579
- Parameters
580
- ----------
581
- times : int, default 3
582
- Number of times to retry this task.
583
- minutes_between_retries : int, default 2
584
- Number of minutes between retries.
585
- """
586
- ...
587
-
588
- @typing.overload
589
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
590
- ...
591
-
592
- @typing.overload
593
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
594
- ...
595
-
596
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
597
- """
598
- Specifies the number of times the task corresponding
599
- to a step needs to be retried.
600
-
601
- This decorator is useful for handling transient errors, such as networking issues.
602
- If your task contains operations that can't be retried safely, e.g. database updates,
603
- it is advisable to annotate it with `@retry(times=0)`.
604
-
605
- This can be used in conjunction with the `@catch` decorator. The `@catch`
606
- decorator will execute a no-op task after all retries have been exhausted,
607
- ensuring that the flow execution can continue.
608
-
294
+ cpu : int, default 1
295
+ Number of CPUs required for this step. If `@resources` is
296
+ also present, the maximum value from all decorators is used.
297
+ memory : int, default 4096
298
+ Memory size (in MB) required for this step. If
299
+ `@resources` is also present, the maximum value from all decorators is
300
+ used.
301
+ disk : int, default 10240
302
+ Disk size (in MB) required for this step. If
303
+ `@resources` is also present, the maximum value from all decorators is
304
+ used.
305
+ image : str, optional, default None
306
+ Docker image to use when launching on Kubernetes. If not specified, and
307
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
308
+ not, a default Docker image mapping to the current version of Python is used.
309
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
310
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
311
+ image_pull_secrets: List[str], default []
312
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
313
+ Kubernetes image pull secrets to use when pulling container images
314
+ in Kubernetes.
315
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
316
+ Kubernetes service account to use when launching pod in Kubernetes.
317
+ secrets : List[str], optional, default None
318
+ Kubernetes secrets to use when launching pod in Kubernetes. These
319
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
320
+ in Metaflow configuration.
321
+ node_selector: Union[Dict[str,str], str], optional, default None
322
+ Kubernetes node selector(s) to apply to the pod running the task.
323
+ Can be passed in as a comma separated string of values e.g.
324
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
325
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
326
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
327
+ Kubernetes namespace to use when launching pod in Kubernetes.
328
+ gpu : int, optional, default None
329
+ Number of GPUs required for this step. A value of zero implies that
330
+ the scheduled node should not have GPUs.
331
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
332
+ The vendor of the GPUs to be used for this step.
333
+ tolerations : List[Dict[str,str]], default []
334
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
335
+ Kubernetes tolerations to use when launching pod in Kubernetes.
336
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
337
+ Kubernetes labels to use when launching pod in Kubernetes.
338
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
339
+ Kubernetes annotations to use when launching pod in Kubernetes.
340
+ use_tmpfs : bool, default False
341
+ This enables an explicit tmpfs mount for this step.
342
+ tmpfs_tempdir : bool, default True
343
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
344
+ tmpfs_size : int, optional, default: None
345
+ The value for the size (in MiB) of the tmpfs mount for this step.
346
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
347
+ memory allocated for this step.
348
+ tmpfs_path : str, optional, default /metaflow_temp
349
+ Path to tmpfs mount for this step.
350
+ persistent_volume_claims : Dict[str, str], optional, default None
351
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
352
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
353
+ shared_memory: int, optional
354
+ Shared memory size (in MiB) required for this step
355
+ port: int, optional
356
+ Port number to specify in the Kubernetes job object
357
+ compute_pool : str, optional, default None
358
+ Compute pool to be used for for this step.
359
+ If not specified, any accessible compute pool within the perimeter is used.
360
+ hostname_resolution_timeout: int, default 10 * 60
361
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
362
+ Only applicable when @parallel is used.
363
+ qos: str, default: Burstable
364
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
609
365
 
610
- Parameters
611
- ----------
612
- times : int, default 3
613
- Number of times to retry this task.
614
- minutes_between_retries : int, default 2
615
- Number of minutes between retries.
366
+ security_context: Dict[str, Any], optional, default None
367
+ Container security context. Applies to the task container. Allows the following keys:
368
+ - privileged: bool, optional, default None
369
+ - allow_privilege_escalation: bool, optional, default None
370
+ - run_as_user: int, optional, default None
371
+ - run_as_group: int, optional, default None
372
+ - run_as_non_root: bool, optional, default None
616
373
  """
617
374
  ...
618
375
 
@@ -745,33 +502,277 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
745
502
  """
746
503
  ...
747
504
 
748
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
505
+ @typing.overload
506
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
749
507
  """
750
- Decorator that helps cache, version and store models/datasets from huggingface hub.
751
-
752
- > Examples
508
+ Specifies the resources needed when executing this step.
753
509
 
754
- **Usage: creating references of models from huggingface that may be loaded in downstream steps**
755
- ```python
756
- @huggingface_hub
757
- @step
758
- def pull_model_from_huggingface(self):
759
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
760
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
761
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
762
- # value of the function is a reference to the model in the backend storage.
763
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
510
+ Use `@resources` to specify the resource requirements
511
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
764
512
 
765
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
766
- self.llama_model = current.huggingface_hub.snapshot_download(
767
- repo_id=self.model_id,
768
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
769
- )
770
- self.next(self.train)
513
+ You can choose the compute layer on the command line by executing e.g.
771
514
  ```
772
-
773
- **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
774
- ```python
515
+ python myflow.py run --with batch
516
+ ```
517
+ or
518
+ ```
519
+ python myflow.py run --with kubernetes
520
+ ```
521
+ which executes the flow on the desired system using the
522
+ requirements specified in `@resources`.
523
+
524
+
525
+ Parameters
526
+ ----------
527
+ cpu : int, default 1
528
+ Number of CPUs required for this step.
529
+ gpu : int, optional, default None
530
+ Number of GPUs required for this step.
531
+ disk : int, optional, default None
532
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
533
+ memory : int, default 4096
534
+ Memory size (in MB) required for this step.
535
+ shared_memory : int, optional, default None
536
+ The value for the size (in MiB) of the /dev/shm volume for this step.
537
+ This parameter maps to the `--shm-size` option in Docker.
538
+ """
539
+ ...
540
+
541
+ @typing.overload
542
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
543
+ ...
544
+
545
+ @typing.overload
546
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
547
+ ...
548
+
549
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
550
+ """
551
+ Specifies the resources needed when executing this step.
552
+
553
+ Use `@resources` to specify the resource requirements
554
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
555
+
556
+ You can choose the compute layer on the command line by executing e.g.
557
+ ```
558
+ python myflow.py run --with batch
559
+ ```
560
+ or
561
+ ```
562
+ python myflow.py run --with kubernetes
563
+ ```
564
+ which executes the flow on the desired system using the
565
+ requirements specified in `@resources`.
566
+
567
+
568
+ Parameters
569
+ ----------
570
+ cpu : int, default 1
571
+ Number of CPUs required for this step.
572
+ gpu : int, optional, default None
573
+ Number of GPUs required for this step.
574
+ disk : int, optional, default None
575
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
576
+ memory : int, default 4096
577
+ Memory size (in MB) required for this step.
578
+ shared_memory : int, optional, default None
579
+ The value for the size (in MiB) of the /dev/shm volume for this step.
580
+ This parameter maps to the `--shm-size` option in Docker.
581
+ """
582
+ ...
583
+
584
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
585
+ """
586
+ S3 Proxy decorator for routing S3 requests through a local proxy service.
587
+
588
+
589
+ Parameters
590
+ ----------
591
+ integration_name : str, optional
592
+ Name of the S3 proxy integration. If not specified, will use the only
593
+ available S3 proxy integration in the namespace (fails if multiple exist).
594
+ write_mode : str, optional
595
+ The desired behavior during write operations to target (origin) S3 bucket.
596
+ allowed options are:
597
+ "origin-and-cache" -> write to both the target S3 bucket and local object
598
+ storage
599
+ "origin" -> only write to the target S3 bucket
600
+ "cache" -> only write to the object storage service used for caching
601
+ debug : bool, optional
602
+ Enable debug logging for proxy operations.
603
+ """
604
+ ...
605
+
606
+ @typing.overload
607
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
608
+ """
609
+ Specifies a timeout for your step.
610
+
611
+ This decorator is useful if this step may hang indefinitely.
612
+
613
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
614
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
615
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
616
+
617
+ Note that all the values specified in parameters are added together so if you specify
618
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
619
+
620
+
621
+ Parameters
622
+ ----------
623
+ seconds : int, default 0
624
+ Number of seconds to wait prior to timing out.
625
+ minutes : int, default 0
626
+ Number of minutes to wait prior to timing out.
627
+ hours : int, default 0
628
+ Number of hours to wait prior to timing out.
629
+ """
630
+ ...
631
+
632
+ @typing.overload
633
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
634
+ ...
635
+
636
+ @typing.overload
637
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
638
+ ...
639
+
640
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
641
+ """
642
+ Specifies a timeout for your step.
643
+
644
+ This decorator is useful if this step may hang indefinitely.
645
+
646
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
647
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
648
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
649
+
650
+ Note that all the values specified in parameters are added together so if you specify
651
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
652
+
653
+
654
+ Parameters
655
+ ----------
656
+ seconds : int, default 0
657
+ Number of seconds to wait prior to timing out.
658
+ minutes : int, default 0
659
+ Number of minutes to wait prior to timing out.
660
+ hours : int, default 0
661
+ Number of hours to wait prior to timing out.
662
+ """
663
+ ...
664
+
665
+ @typing.overload
666
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
667
+ """
668
+ Specifies environment variables to be set prior to the execution of a step.
669
+
670
+
671
+ Parameters
672
+ ----------
673
+ vars : Dict[str, str], default {}
674
+ Dictionary of environment variables to set.
675
+ """
676
+ ...
677
+
678
+ @typing.overload
679
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
680
+ ...
681
+
682
+ @typing.overload
683
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
684
+ ...
685
+
686
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
687
+ """
688
+ Specifies environment variables to be set prior to the execution of a step.
689
+
690
+
691
+ Parameters
692
+ ----------
693
+ vars : Dict[str, str], default {}
694
+ Dictionary of environment variables to set.
695
+ """
696
+ ...
697
+
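A minimal sketch of `@environment` (variable names and values are illustrative):

```python
import os

from metaflow import FlowSpec, environment, step

class EnvFlow(FlowSpec):

    # MY_FLAG is set in the step's environment before the user code runs.
    @environment(vars={"MY_FLAG": "1", "TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        print(os.environ["MY_FLAG"])  # -> "1"
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvFlow()
```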
698
+ @typing.overload
699
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
700
+ """
701
+ Specifies the PyPI packages for the step.
702
+
703
+ Information in this decorator will augment any
704
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
705
+ you can use `@pypi_base` to set packages required by all
706
+ steps and use `@pypi` to specify step-specific overrides.
707
+
708
+
709
+ Parameters
710
+ ----------
711
+ packages : Dict[str, str], default: {}
712
+ Packages to use for this step. The key is the name of the package
713
+ and the value is the version to use.
714
+ python : str, optional, default: None
715
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
716
+ that the version used will correspond to the version of the Python interpreter used to start the run.
717
+ """
718
+ ...
719
+
720
+ @typing.overload
721
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
722
+ ...
723
+
724
+ @typing.overload
725
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
726
+ ...
727
+
728
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
729
+ """
730
+ Specifies the PyPI packages for the step.
731
+
732
+ Information in this decorator will augment any
733
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
734
+ you can use `@pypi_base` to set packages required by all
735
+ steps and use `@pypi` to specify step-specific overrides.
736
+
737
+
738
+ Parameters
739
+ ----------
740
+ packages : Dict[str, str], default: {}
741
+ Packages to use for this step. The key is the name of the package
742
+ and the value is the version to use.
743
+ python : str, optional, default: None
744
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
745
+ that the version used will correspond to the version of the Python interpreter used to start the run.
746
+ """
747
+ ...
748
+
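A minimal sketch combining flow-level `@pypi_base` with a step-level `@pypi` override, as the docstring suggests (package and Python versions are illustrative; in standard Metaflow usage the flow is run with `--environment=pypi`):

```python
from metaflow import FlowSpec, pypi, pypi_base, step

# Packages shared by every step of the flow.
@pypi_base(packages={"requests": "2.31.0"}, python="3.10.13")
class PypiFlow(FlowSpec):

    # Step-specific addition on top of the flow-level packages.
    @pypi(packages={"pandas": "2.2.2"})
    @step
    def start(self):
        import pandas as pd
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiFlow()
```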
749
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
750
+ """
751
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
752
+
753
+ > Examples
754
+
755
+ **Usage: creating references to models from huggingface that may be loaded in downstream steps**
756
+ ```python
757
+ @huggingface_hub
758
+ @step
759
+ def pull_model_from_huggingface(self):
760
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
761
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
762
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
763
+ # value of the function is a reference to the model in the backend storage.
764
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
765
+
766
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
767
+ self.llama_model = current.huggingface_hub.snapshot_download(
768
+ repo_id=self.model_id,
769
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
770
+ )
771
+ self.next(self.train)
772
+ ```
773
+
774
+ **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
775
+ ```python
775
776
  @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
776
777
  @step
777
778
  def pull_model_from_huggingface(self):
@@ -825,271 +826,334 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
825
826
  """
826
827
  ...
827
828
 
828
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
829
+ @typing.overload
830
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
831
+ """
832
+ Enables checkpointing for a step.
833
+
834
+ > Examples
835
+
836
+ - Saving Checkpoints
837
+
838
+ ```python
839
+ @checkpoint
840
+ @step
841
+ def train(self):
842
+ model = create_model(self.parameters, checkpoint_path = None)
843
+ for i in range(self.epochs):
844
+ # some training logic
845
+ loss = model.train(self.dataset)
846
+ if i % 10 == 0:
847
+ model.save(
848
+ current.checkpoint.directory,
849
+ )
850
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
851
+ # and returns a reference dictionary to the checkpoint saved in the datastore
852
+ self.latest_checkpoint = current.checkpoint.save(
853
+ name="epoch_checkpoint",
854
+ metadata={
855
+ "epoch": i,
856
+ "loss": loss,
857
+ }
858
+ )
859
+ ```
860
+
861
+ - Using Loaded Checkpoints
862
+
863
+ ```python
864
+ @retry(times=3)
865
+ @checkpoint
866
+ @step
867
+ def train(self):
868
+ # Assume that the task has restarted and the previous attempt of the task
869
+ # saved a checkpoint
870
+ checkpoint_path = None
871
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
872
+ print("Loaded checkpoint from the previous attempt")
873
+ checkpoint_path = current.checkpoint.directory
874
+
875
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
876
+ for i in range(self.epochs):
877
+ ...
878
+ ```
879
+
880
+
881
+ Parameters
882
+ ----------
883
+ load_policy : str, default: "fresh"
884
+ The policy for loading the checkpoint. The following policies are supported:
885
+ - "eager": Loads the the latest available checkpoint within the namespace.
886
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
887
+ will be loaded at the start of the task.
888
+ - "none": Do not load any checkpoint
889
+ - "fresh": Loads the lastest checkpoint created within the running Task.
890
+ This mode helps load checkpoints across various retry attempts of the same task.
891
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
892
+ created within the task will be loaded when the task retries execution after a failure.
893
+
894
+ temp_dir_root : str, default: None
895
+ The root directory under which `current.checkpoint.directory` will be created.
896
+ """
897
+ ...
898
+
899
+ @typing.overload
900
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
901
+ ...
902
+
903
+ @typing.overload
904
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
905
+ ...
906
+
907
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
908
+ """
909
+ Enables checkpointing for a step.
910
+
911
+ > Examples
912
+
913
+ - Saving Checkpoints
914
+
915
+ ```python
916
+ @checkpoint
917
+ @step
918
+ def train(self):
919
+ model = create_model(self.parameters, checkpoint_path = None)
920
+ for i in range(self.epochs):
921
+ # some training logic
922
+ loss = model.train(self.dataset)
923
+ if i % 10 == 0:
924
+ model.save(
925
+ current.checkpoint.directory,
926
+ )
927
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
928
+ # and returns a reference dictionary to the checkpoint saved in the datastore
929
+ self.latest_checkpoint = current.checkpoint.save(
930
+ name="epoch_checkpoint",
931
+ metadata={
932
+ "epoch": i,
933
+ "loss": loss,
934
+ }
935
+ )
936
+ ```
937
+
938
+ - Using Loaded Checkpoints
939
+
940
+ ```python
941
+ @retry(times=3)
942
+ @checkpoint
943
+ @step
944
+ def train(self):
945
+ # Assume that the task has restarted and the previous attempt of the task
946
+ # saved a checkpoint
947
+ checkpoint_path = None
948
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
949
+ print("Loaded checkpoint from the previous attempt")
950
+ checkpoint_path = current.checkpoint.directory
951
+
952
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
953
+ for i in range(self.epochs):
954
+ ...
955
+ ```
956
+
957
+
958
+ Parameters
959
+ ----------
960
+ load_policy : str, default: "fresh"
961
+ The policy for loading the checkpoint. The following policies are supported:
962
+ - "eager": Loads the the latest available checkpoint within the namespace.
963
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
964
+ will be loaded at the start of the task.
965
+ - "none": Do not load any checkpoint
966
+ - "fresh": Loads the lastest checkpoint created within the running Task.
967
+ This mode helps load checkpoints across various retry attempts of the same task.
968
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
969
+ created within the task will be loaded when the task retries execution after a failure.
970
+
971
+ temp_dir_root : str, default: None
972
+ The root directory under which `current.checkpoint.directory` will be created.
973
+ """
974
+ ...
975
+
976
+ @typing.overload
977
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
978
+ """
979
+ A simple decorator that demonstrates using CardDecoratorInjector
980
+ to inject a card and render simple markdown content.
981
+ """
982
+ ...
983
+
984
+ @typing.overload
985
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
986
+ ...
987
+
988
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
989
+ """
990
+ A simple decorator that demonstrates using CardDecoratorInjector
991
+ to inject a card and render simple markdown content.
992
+ """
993
+ ...
994
+
995
+ @typing.overload
996
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
997
+ """
998
+ Decorator prototype for all step decorators. This function gets specialized
999
+ and imported for all decorators types by _import_plugin_decorators().
1000
+ """
1001
+ ...
1002
+
1003
+ @typing.overload
1004
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1005
+ ...
1006
+
1007
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
829
1008
  """
830
- Specifies that this step should execute on DGX cloud.
831
-
832
-
833
- Parameters
834
- ----------
835
- gpu : int
836
- Number of GPUs to use.
837
- gpu_type : str
838
- Type of Nvidia GPU to use.
1009
+ Decorator prototype for all step decorators. This function gets specialized
1010
+ and imported for all decorator types by _import_plugin_decorators().
839
1011
  """
840
1012
  ...
841
1013
 
842
1014
  @typing.overload
843
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1015
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
844
1016
  """
845
- Specifies secrets to be retrieved and injected as environment variables prior to
846
- the execution of a step.
1017
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1018
+
1019
+ Note that you may add multiple `@card` decorators in a step with different parameters.
847
1020
 
848
1021
 
849
1022
  Parameters
850
1023
  ----------
851
- sources : List[Union[str, Dict[str, Any]]], default: []
852
- List of secret specs, defining how the secrets are to be retrieved
853
- role : str, optional, default: None
854
- Role to use for fetching secrets
1024
+ type : str, default 'default'
1025
+ Card type.
1026
+ id : str, optional, default None
1027
+ If multiple cards are present, use this id to identify this card.
1028
+ options : Dict[str, Any], default {}
1029
+ Options passed to the card. The contents depend on the card type.
1030
+ timeout : int, default 45
1031
+ Interrupt reporting if it takes more than this many seconds.
855
1032
  """
856
1033
  ...
857
1034
 
858
1035
  @typing.overload
859
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1036
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
860
1037
  ...
861
1038
 
862
1039
  @typing.overload
863
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1040
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
864
1041
  ...
865
1042
 
866
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1043
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
867
1044
  """
868
- Specifies secrets to be retrieved and injected as environment variables prior to
869
- the execution of a step.
1045
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1046
+
1047
+ Note that you may add multiple `@card` decorators in a step with different parameters.
870
1048
 
871
1049
 
872
1050
  Parameters
873
1051
  ----------
874
- sources : List[Union[str, Dict[str, Any]]], default: []
875
- List of secret specs, defining how the secrets are to be retrieved
876
- role : str, optional, default: None
877
- Role to use for fetching secrets
1052
+ type : str, default 'default'
1053
+ Card type.
1054
+ id : str, optional, default None
1055
+ If multiple cards are present, use this id to identify this card.
1056
+ options : Dict[str, Any], default {}
1057
+ Options passed to the card. The contents depend on the card type.
1058
+ timeout : int, default 45
1059
+ Interrupt reporting if it takes more than this many seconds.
878
1060
  """
879
1061
  ...
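A minimal sketch of attaching two cards to one step and appending content to one of them, using the standard `metaflow.cards.Markdown` component (card content is illustrative):

```python
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown

class CardFlow(FlowSpec):

    # Two cards on the same step: the default task card plus a custom blank card.
    @card                                     # type='default'
    @card(type="blank", id="notes", timeout=60)
    @step
    def start(self):
        # Append content to the card identified by id="notes".
        current.card["notes"].append(Markdown("## Run notes\nEverything looks good."))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardFlow()
```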
880
1062
 
881
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1063
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
882
1064
  """
883
- Specifies that this step should execute on Kubernetes.
1065
+ Specifies that this step should execute on DGX cloud.
884
1066
 
885
1067
 
886
1068
  Parameters
887
1069
  ----------
888
- cpu : int, default 1
889
- Number of CPUs required for this step. If `@resources` is
890
- also present, the maximum value from all decorators is used.
891
- memory : int, default 4096
892
- Memory size (in MB) required for this step. If
893
- `@resources` is also present, the maximum value from all decorators is
894
- used.
895
- disk : int, default 10240
896
- Disk size (in MB) required for this step. If
897
- `@resources` is also present, the maximum value from all decorators is
898
- used.
899
- image : str, optional, default None
900
- Docker image to use when launching on Kubernetes. If not specified, and
901
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
902
- not, a default Docker image mapping to the current version of Python is used.
903
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
904
- If given, the imagePullPolicy to be applied to the Docker image of the step.
905
- image_pull_secrets: List[str], default []
906
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
907
- Kubernetes image pull secrets to use when pulling container images
908
- in Kubernetes.
909
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
910
- Kubernetes service account to use when launching pod in Kubernetes.
911
- secrets : List[str], optional, default None
912
- Kubernetes secrets to use when launching pod in Kubernetes. These
913
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
914
- in Metaflow configuration.
915
- node_selector: Union[Dict[str,str], str], optional, default None
916
- Kubernetes node selector(s) to apply to the pod running the task.
917
- Can be passed in as a comma separated string of values e.g.
918
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
919
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
920
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
921
- Kubernetes namespace to use when launching pod in Kubernetes.
922
- gpu : int, optional, default None
923
- Number of GPUs required for this step. A value of zero implies that
924
- the scheduled node should not have GPUs.
925
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
926
- The vendor of the GPUs to be used for this step.
927
- tolerations : List[Dict[str,str]], default []
928
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
929
- Kubernetes tolerations to use when launching pod in Kubernetes.
930
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
931
- Kubernetes labels to use when launching pod in Kubernetes.
932
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
933
- Kubernetes annotations to use when launching pod in Kubernetes.
934
- use_tmpfs : bool, default False
935
- This enables an explicit tmpfs mount for this step.
936
- tmpfs_tempdir : bool, default True
937
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
938
- tmpfs_size : int, optional, default: None
939
- The value for the size (in MiB) of the tmpfs mount for this step.
940
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
941
- memory allocated for this step.
942
- tmpfs_path : str, optional, default /metaflow_temp
943
- Path to tmpfs mount for this step.
944
- persistent_volume_claims : Dict[str, str], optional, default None
945
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
946
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
947
- shared_memory: int, optional
948
- Shared memory size (in MiB) required for this step
949
- port: int, optional
950
- Port number to specify in the Kubernetes job object
951
- compute_pool : str, optional, default None
952
- Compute pool to be used for for this step.
953
- If not specified, any accessible compute pool within the perimeter is used.
954
- hostname_resolution_timeout: int, default 10 * 60
955
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
956
- Only applicable when @parallel is used.
957
- qos: str, default: Burstable
958
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
959
-
960
- security_context: Dict[str, Any], optional, default None
961
- Container security context. Applies to the task container. Allows the following keys:
962
- - privileged: bool, optional, default None
963
- - allow_privilege_escalation: bool, optional, default None
964
- - run_as_user: int, optional, default None
965
- - run_as_group: int, optional, default None
966
- - run_as_non_root: bool, optional, default None
1070
+ gpu : int
1071
+ Number of GPUs to use.
1072
+ gpu_type : str
1073
+ Type of Nvidia GPU to use.
967
1074
  """
968
1075
  ...
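A minimal sketch of `@nvct`; the `gpu_type` value is illustrative and depends on what your DGX Cloud environment offers:

```python
from metaflow import FlowSpec, nvct, step

class DgxTrainFlow(FlowSpec):

    # Request one GPU on DGX Cloud; "H100" is an illustrative gpu_type.
    @nvct(gpu=1, gpu_type="H100")
    @step
    def start(self):
        # GPU-bound training code would run here.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DgxTrainFlow()
```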
969
1076
 
970
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1077
+ @typing.overload
1078
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
971
1079
  """
972
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
973
-
974
- User code call
975
- --------------
976
- @ollama(
977
- models=[...],
978
- ...
979
- )
980
-
981
- Valid backend options
982
- ---------------------
983
- - 'local': Run as a separate process on the local task machine.
984
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
985
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
986
-
987
- Valid model options
988
- -------------------
989
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
990
-
991
-
992
- Parameters
993
- ----------
994
- models: list[str]
995
- List of Ollama containers running models in sidecars.
996
- backend: str
997
- Determines where and how to run the Ollama process.
998
- force_pull: bool
999
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1000
- cache_update_policy: str
1001
- Cache update policy: "auto", "force", or "never".
1002
- force_cache_update: bool
1003
- Simple override for "force" cache update policy.
1004
- debug: bool
1005
- Whether to turn on verbose debugging logs.
1006
- circuit_breaker_config: dict
1007
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1008
- timeout_config: dict
1009
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1080
+ Decorator prototype for all step decorators. This function gets specialized
1081
+ and imported for all decorator types by _import_plugin_decorators().
1010
1082
  """
1011
1083
  ...
1012
1084
 
1013
1085
  @typing.overload
1014
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1086
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1087
+ ...
1088
+
1089
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1015
1090
  """
1016
- Specifies environment variables to be set prior to the execution of a step.
1017
-
1018
-
1019
- Parameters
1020
- ----------
1021
- vars : Dict[str, str], default {}
1022
- Dictionary of environment variables to set.
1091
+ Decorator prototype for all step decorators. This function gets specialized
1092
+ and imported for all decorator types by _import_plugin_decorators().
1023
1093
  """
1024
1094
  ...
1025
1095
 
1026
1096
  @typing.overload
1027
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1097
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1098
+ """
1099
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1100
+ It exists to make it easier for users to know that this decorator should only be used with
1101
+ a Neo Cloud like Nebius.
1102
+ """
1028
1103
  ...
1029
1104
 
1030
1105
  @typing.overload
1031
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1106
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1032
1107
  ...
1033
1108
 
1034
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1109
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1035
1110
  """
1036
- Specifies environment variables to be set prior to the execution of a step.
1037
-
1038
-
1039
- Parameters
1040
- ----------
1041
- vars : Dict[str, str], default {}
1042
- Dictionary of environment variables to set.
1111
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1112
+ It exists to make it easier for users to know that this decorator should only be used with
1113
+ a Neo Cloud like Nebius.
1043
1114
  """
1044
1115
  ...
1045
1116
 
1046
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1117
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1047
1118
  """
1048
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1119
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
1049
1120
 
1050
1121
  User code call
1051
1122
  --------------
1052
- @vllm(
1053
- model="...",
1123
+ @ollama(
1124
+ models=[...],
1054
1125
  ...
1055
1126
  )
1056
1127
 
1057
1128
  Valid backend options
1058
1129
  ---------------------
1059
1130
  - 'local': Run as a separate process on the local task machine.
1131
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1132
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1060
1133
 
1061
1134
  Valid model options
1062
1135
  -------------------
1063
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1064
-
1065
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1066
- If you need multiple models, you must create multiple @vllm decorators.
1136
+ Any model listed at https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1067
1137
 
1068
1138
 
1069
1139
  Parameters
1070
1140
  ----------
1071
- model: str
1072
- HuggingFace model identifier to be served by vLLM.
1141
+ models: list[str]
1142
+ List of Ollama containers running models in sidecars.
1073
1143
  backend: str
1074
- Determines where and how to run the vLLM process.
1075
- openai_api_server: bool
1076
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1077
- Default is False (uses native engine).
1078
- Set to True for backward compatibility with existing code.
1144
+ Determines where and how to run the Ollama process.
1145
+ force_pull: bool
1146
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1147
+ cache_update_policy: str
1148
+ Cache update policy: "auto", "force", or "never".
1149
+ force_cache_update: bool
1150
+ Simple override for "force" cache update policy.
1079
1151
  debug: bool
1080
1152
  Whether to turn on verbose debugging logs.
1081
- card_refresh_interval: int
1082
- Interval in seconds for refreshing the vLLM status card.
1083
- Only used when openai_api_server=True.
1084
- max_retries: int
1085
- Maximum number of retries checking for vLLM server startup.
1086
- Only used when openai_api_server=True.
1087
- retry_alert_frequency: int
1088
- Frequency of alert logs for vLLM server startup retries.
1089
- Only used when openai_api_server=True.
1090
- engine_args : dict
1091
- Additional keyword arguments to pass to the vLLM engine.
1092
- For example, `tensor_parallel_size=2`.
1153
+ circuit_breaker_config: dict
1154
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1155
+ timeout_config: dict
1156
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1093
1157
  """
1094
1158
  ...
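A minimal sketch of `@ollama` with the local backend, assuming the remaining keyword arguments have usable defaults in the released decorator and that the `ollama` Python client is available in the step environment:

```python
from metaflow import FlowSpec, ollama, step

class OllamaFlow(FlowSpec):

    # Run an Ollama sidecar serving llama3.2 next to this task.
    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        import ollama as ollama_client  # alias to avoid clashing with the decorator name
        reply = ollama_client.chat(
            model="llama3.2",
            messages=[{"role": "user", "content": "Say hello in one word."}],
        )
        print(reply)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    OllamaFlow()
```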
1095
1159
 
@@ -1153,83 +1217,41 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1153
1217
  ...
1154
1218
 
1155
1219
  @typing.overload
1156
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1220
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1157
1221
  """
1158
- Specifies a timeout for your step.
1159
-
1160
- This decorator is useful if this step may hang indefinitely.
1161
-
1162
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1163
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1164
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1165
-
1166
- Note that all the values specified in parameters are added together so if you specify
1167
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1222
+ Specifies secrets to be retrieved and injected as environment variables prior to
1223
+ the execution of a step.
1168
1224
 
1169
1225
 
1170
1226
  Parameters
1171
1227
  ----------
1172
- seconds : int, default 0
1173
- Number of seconds to wait prior to timing out.
1174
- minutes : int, default 0
1175
- Number of minutes to wait prior to timing out.
1176
- hours : int, default 0
1177
- Number of hours to wait prior to timing out.
1228
+ sources : List[Union[str, Dict[str, Any]]], default: []
1229
+ List of secret specs, defining how the secrets are to be retrieved
1230
+ role : str, optional, default: None
1231
+ Role to use for fetching secrets
1178
1232
  """
1179
1233
  ...
1180
1234
 
1181
1235
  @typing.overload
1182
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1236
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1183
1237
  ...
1184
1238
 
1185
1239
  @typing.overload
1186
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1187
- ...
1188
-
1189
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1190
- """
1191
- Specifies a timeout for your step.
1192
-
1193
- This decorator is useful if this step may hang indefinitely.
1194
-
1195
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1196
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1197
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1198
-
1199
- Note that all the values specified in parameters are added together so if you specify
1200
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1201
-
1202
-
1203
- Parameters
1204
- ----------
1205
- seconds : int, default 0
1206
- Number of seconds to wait prior to timing out.
1207
- minutes : int, default 0
1208
- Number of minutes to wait prior to timing out.
1209
- hours : int, default 0
1210
- Number of hours to wait prior to timing out.
1211
- """
1240
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1212
1241
  ...
1213
1242
 
1214
- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1243
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1215
1244
  """
1216
- S3 Proxy decorator for routing S3 requests through a local proxy service.
1245
+ Specifies secrets to be retrieved and injected as environment variables prior to
1246
+ the execution of a step.
1217
1247
 
1218
1248
 
1219
1249
  Parameters
1220
1250
  ----------
1221
- integration_name : str, optional
1222
- Name of the S3 proxy integration. If not specified, will use the only
1223
- available S3 proxy integration in the namespace (fails if multiple exist).
1224
- write_mode : str, optional
1225
- The desired behavior during write operations to target (origin) S3 bucket.
1226
- allowed options are:
1227
- "origin-and-cache" -> write to both the target S3 bucket and local object
1228
- storage
1229
- "origin" -> only write to the target S3 bucket
1230
- "cache" -> only write to the object storage service used for caching
1231
- debug : bool, optional
1232
- Enable debug logging for proxy operations.
1251
+ sources : List[Union[str, Dict[str, Any]]], default: []
1252
+ List of secret specs, defining how the secrets are to be retrieved
1253
+ role : str, optional, default: None
1254
+ Role to use for fetching secrets
1233
1255
  """
1234
1256
  ...
1235
1257
 
@@ -1251,100 +1273,170 @@ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepF
1251
1273
  ...
1252
1274
 
1253
1275
  @typing.overload
1254
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1276
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1255
1277
  """
1256
- A simple decorator that demonstrates using CardDecoratorInjector
1257
- to inject a card and render simple markdown content.
1278
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1279
+ It exists to make it easier for users to know that this decorator should only be used with
1280
+ a Neo Cloud like CoreWeave.
1258
1281
  """
1259
1282
  ...
1260
1283
 
1261
1284
  @typing.overload
1262
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1285
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1263
1286
  ...
1264
1287
 
1265
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1288
+ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1266
1289
  """
1267
- A simple decorator that demonstrates using CardDecoratorInjector
1268
- to inject a card and render simple markdown content.
1290
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1291
+ It exists to make it easier for users to know that this decorator should only be used with
1292
+ a Neo Cloud like CoreWeave.
1269
1293
  """
1270
1294
  ...
1271
1295
 
1272
1296
  @typing.overload
1273
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1297
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1274
1298
  """
1275
- Specifies the resources needed when executing this step.
1299
+ Specifies the number of times the task corresponding
1300
+ to a step needs to be retried.
1276
1301
 
1277
- Use `@resources` to specify the resource requirements
1278
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1302
+ This decorator is useful for handling transient errors, such as networking issues.
1303
+ If your task contains operations that can't be retried safely, e.g. database updates,
1304
+ it is advisable to annotate it with `@retry(times=0)`.
1279
1305
 
1280
- You can choose the compute layer on the command line by executing e.g.
1281
- ```
1282
- python myflow.py run --with batch
1283
- ```
1284
- or
1285
- ```
1286
- python myflow.py run --with kubernetes
1287
- ```
1288
- which executes the flow on the desired system using the
1289
- requirements specified in `@resources`.
1306
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1307
+ decorator will execute a no-op task after all retries have been exhausted,
1308
+ ensuring that the flow execution can continue.
1290
1309
 
1291
1310
 
1292
1311
  Parameters
1293
1312
  ----------
1294
- cpu : int, default 1
1295
- Number of CPUs required for this step.
1296
- gpu : int, optional, default None
1297
- Number of GPUs required for this step.
1298
- disk : int, optional, default None
1299
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1300
- memory : int, default 4096
1301
- Memory size (in MB) required for this step.
1302
- shared_memory : int, optional, default None
1303
- The value for the size (in MiB) of the /dev/shm volume for this step.
1304
- This parameter maps to the `--shm-size` option in Docker.
1313
+ times : int, default 3
1314
+ Number of times to retry this task.
1315
+ minutes_between_retries : int, default 2
1316
+ Number of minutes between retries.
1305
1317
  """
1306
1318
  ...
1307
1319
 
1308
1320
  @typing.overload
1309
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1321
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1310
1322
  ...
1311
1323
 
1312
1324
  @typing.overload
1313
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1325
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1314
1326
  ...
1315
1327
 
1316
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1328
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1317
1329
  """
1318
- Specifies the resources needed when executing this step.
1330
+ Specifies the number of times the task corresponding
1331
+ to a step needs to be retried.
1319
1332
 
1320
- Use `@resources` to specify the resource requirements
1321
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1333
+ This decorator is useful for handling transient errors, such as networking issues.
1334
+ If your task contains operations that can't be retried safely, e.g. database updates,
1335
+ it is advisable to annotate it with `@retry(times=0)`.
1322
1336
 
1323
- You can choose the compute layer on the command line by executing e.g.
1324
- ```
1325
- python myflow.py run --with batch
1326
- ```
1327
- or
1328
- ```
1329
- python myflow.py run --with kubernetes
1330
- ```
1331
- which executes the flow on the desired system using the
1332
- requirements specified in `@resources`.
1337
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1338
+ decorator will execute a no-op task after all retries have been exhausted,
1339
+ ensuring that the flow execution can continue.
1340
+
1341
+
1342
+ Parameters
1343
+ ----------
1344
+ times : int, default 3
1345
+ Number of times to retry this task.
1346
+ minutes_between_retries : int, default 2
1347
+ Number of minutes between retries.
1348
+ """
1349
+ ...
1350
+
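A minimal sketch of `@retry`, including the `times=0` pattern recommended above for steps with non-retriable side effects:

```python
import random

from metaflow import FlowSpec, retry, step

class RetryFlow(FlowSpec):

    # Retry transient failures: up to 4 attempts in total, 1 minute apart.
    @retry(times=3, minutes_between_retries=1)
    @step
    def start(self):
        if random.random() < 0.5:  # stand-in for a transient error, e.g. a flaky network call
            raise RuntimeError("transient failure")
        self.next(self.end)

    # Steps with non-idempotent side effects should not be retried blindly.
    @retry(times=0)
    @step
    def end(self):
        pass

if __name__ == "__main__":
    RetryFlow()
```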
1351
+ @typing.overload
1352
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1353
+ """
1354
+ Specifies the PyPI packages for all steps of the flow.
1355
+
1356
+ Use `@pypi_base` to set common packages required by all
1357
+ steps and use `@pypi` to specify step-specific overrides.
1358
+
1359
+ Parameters
1360
+ ----------
1361
+ packages : Dict[str, str], default: {}
1362
+ Packages to use for this flow. The key is the name of the package
1363
+ and the value is the version to use.
1364
+ python : str, optional, default: None
1365
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1366
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1367
+ """
1368
+ ...
1369
+
1370
+ @typing.overload
1371
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1372
+ ...
1373
+
1374
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1375
+ """
1376
+ Specifies the PyPI packages for all steps of the flow.
1377
+
1378
+ Use `@pypi_base` to set common packages required by all
1379
+ steps and use `@pypi` to specify step-specific overrides.
1380
+
1381
+ Parameters
1382
+ ----------
1383
+ packages : Dict[str, str], default: {}
1384
+ Packages to use for this flow. The key is the name of the package
1385
+ and the value is the version to use.
1386
+ python : str, optional, default: None
1387
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1388
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1389
+ """
1390
+ ...
1391
+
1392
+ @typing.overload
1393
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1394
+ """
1395
+ Specifies the times when the flow should be run when running on a
1396
+ production scheduler.
1397
+
1398
+
1399
+ Parameters
1400
+ ----------
1401
+ hourly : bool, default False
1402
+ Run the workflow hourly.
1403
+ daily : bool, default True
1404
+ Run the workflow daily.
1405
+ weekly : bool, default False
1406
+ Run the workflow weekly.
1407
+ cron : str, optional, default None
1408
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1409
+ specified by this expression.
1410
+ timezone : str, optional, default None
1411
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1412
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1413
+ """
1414
+ ...
1415
+
1416
+ @typing.overload
1417
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1418
+ ...
1419
+
1420
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1421
+ """
1422
+ Specifies the times when the flow should be run when running on a
1423
+ production scheduler.
1333
1424
 
1334
1425
 
1335
1426
  Parameters
1336
1427
  ----------
1337
- cpu : int, default 1
1338
- Number of CPUs required for this step.
1339
- gpu : int, optional, default None
1340
- Number of GPUs required for this step.
1341
- disk : int, optional, default None
1342
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1343
- memory : int, default 4096
1344
- Memory size (in MB) required for this step.
1345
- shared_memory : int, optional, default None
1346
- The value for the size (in MiB) of the /dev/shm volume for this step.
1347
- This parameter maps to the `--shm-size` option in Docker.
1428
+ hourly : bool, default False
1429
+ Run the workflow hourly.
1430
+ daily : bool, default True
1431
+ Run the workflow daily.
1432
+ weekly : bool, default False
1433
+ Run the workflow weekly.
1434
+ cron : str, optional, default None
1435
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1436
+ specified by this expression.
1437
+ timezone : str, optional, default None
1438
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1439
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1348
1440
  """
1349
1441
  ...
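A minimal sketch of `@schedule` with a cron expression; the schedule only takes effect once the flow is deployed to a production scheduler, and the cron string and timezone below are illustrative:

```python
from metaflow import FlowSpec, schedule, step

# Run every day at 02:30 in the given timezone when deployed to a production scheduler.
@schedule(cron="30 2 * * *", timezone="Etc/UTC")
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```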
1350
1442
 
@@ -1383,56 +1475,51 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
1383
1475
  """
1384
1476
  ...
1385
1477
 
1386
- @typing.overload
1387
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1478
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1388
1479
  """
1389
- Specifies the event(s) that this flow depends on.
1390
-
1391
- ```
1392
- @trigger(event='foo')
1393
- ```
1394
- or
1395
- ```
1396
- @trigger(events=['foo', 'bar'])
1397
- ```
1398
-
1399
- Additionally, you can specify the parameter mappings
1400
- to map event payload to Metaflow parameters for the flow.
1401
- ```
1402
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1403
- ```
1404
- or
1405
- ```
1406
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1407
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1408
- ```
1409
-
1410
- 'parameters' can also be a list of strings and tuples like so:
1411
- ```
1412
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1413
- ```
1414
- This is equivalent to:
1415
- ```
1416
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1417
- ```
1480
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1481
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1482
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1483
+ added as flow decorators. Adding more than one decorator ensures that the `start` step
1484
+ starts only after all sensors finish.
1418
1485
 
1419
1486
 
1420
1487
  Parameters
1421
1488
  ----------
1422
- event : Union[str, Dict[str, Any]], optional, default None
1423
- Event dependency for this flow.
1424
- events : List[Union[str, Dict[str, Any]]], default []
1425
- Events dependency for this flow.
1426
- options : Dict[str, Any], default {}
1427
- Backend-specific configuration for tuning eventing behavior.
1489
+ timeout : int
1490
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1491
+ poke_interval : int
1492
+ Time in seconds that the job should wait in between each try. (Default: 60)
1493
+ mode : str
1494
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1495
+ exponential_backoff : bool
1496
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1497
+ pool : str
1498
+ the slot pool this task should run in,
1499
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1500
+ soft_fail : bool
1501
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1502
+ name : str
1503
+ Name of the sensor on Airflow
1504
+ description : str
1505
+ Description of sensor in the Airflow UI
1506
+ bucket_key : Union[str, List[str]]
1507
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1508
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1509
+ bucket_name : str
1510
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1511
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1512
+ wildcard_match : bool
1513
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1514
+ aws_conn_id : str
1515
+ a reference to the s3 connection on Airflow. (Default: None)
1516
+ verify : bool
1517
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1428
1518
  """
1429
1519
  ...
1430
1520
 
1431
1521
  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  Specifies the event(s) that this flow depends on.
 
@@ -1470,202 +1557,100 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  event : Union[str, Dict[str, Any]], optional, default None
  Event dependency for this flow.
  events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
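The `@schedule` and `@pypi_base` stubs removed at this point in the diff document flow-level scheduling and PyPI dependency pinning. As a quick illustration of the parameters they describe, here is a minimal sketch; the flow name, package pin, and Python version are hypothetical.

```
from metaflow import FlowSpec, pypi_base, schedule, step

# Hypothetical nightly flow: run daily on the production scheduler and
# resolve all steps against a pinned PyPI environment.
@schedule(daily=True)
@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.9")  # illustrative pins
class NightlyReportFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # provided by the @pypi_base environment
        print("pandas", pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyReportFlow()
```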
  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
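To illustrate the parameter-mapping forms documented above, here is a minimal sketch of `@trigger` with a single event. The event name, payload field, and flow are hypothetical, and the trigger only takes effect once the flow is deployed to a production orchestrator.

```
from metaflow import FlowSpec, Parameter, step, trigger

# Hypothetical event 'data_updated' whose payload carries a 'table_name'
# field; the mapping fills the flow's `table` parameter from that field.
@trigger(event={'name': 'data_updated', 'parameters': {'table': 'table_name'}})
class EventDrivenFlow(FlowSpec):
    # Default applies to manual runs; triggered runs take the event value.
    table = Parameter('table', default='raw_events')

    @step
    def start(self):
        print(f"Processing table: {self.table}")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    EventDrivenFlow()
```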
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time, in seconds, that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (the default), the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
  """
  ...
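A minimal sketch of `@airflow_external_task_sensor` as documented above. The upstream DAG id, sensor name, and execution delta are hypothetical, and parameters that are omitted are assumed to use the documented defaults.

```
from datetime import timedelta

from metaflow import FlowSpec, step, airflow_external_task_sensor

# Hypothetical upstream DAG: block `start` until it has succeeded.
@airflow_external_task_sensor(
    name="wait_for_upstream_etl",
    external_dag_id="nightly_etl",        # assumed upstream DAG id
    execution_delta=timedelta(hours=1),   # upstream runs one hour earlier
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        print("Upstream DAG finished; continuing.")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```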
 
@@ -1834,89 +1819,104 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
  """
  ...
 
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
 
 
  Parameters
  ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
 
 
  Parameters
  ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
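To round out the `@trigger_on_finish` stub above, here is a minimal sketch of a downstream flow that also uses `@project` for namespacing. The project and flow names are hypothetical, and the trigger only fires between deployed flows running in the same namespace.

```
from metaflow import FlowSpec, project, step, trigger_on_finish

# Hypothetical downstream flow: runs after `TrainingFlow` in the same
# project branch finishes successfully.
@project(name="my_project")
@trigger_on_finish(flow="TrainingFlow")
class ReportingFlow(FlowSpec):

    @step
    def start(self):
        print("Upstream TrainingFlow completed; building report.")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ReportingFlow()
```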