ob-metaflow-stubs 6.0.9.0__py2.py3-none-any.whl → 6.0.9.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ob-metaflow-stubs might be problematic.

Files changed (262)
  1. metaflow-stubs/__init__.pyi +862 -862
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +2 -2
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/meta_files.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +36 -36
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +2 -2
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +1 -1
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +2 -2
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +1 -1
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +2 -2
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +2 -2
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +2 -2
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +3 -3
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +2 -2
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +2 -2
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +1 -1
  116. metaflow-stubs/multicore_utils.pyi +1 -1
  117. metaflow-stubs/ob_internal.pyi +1 -1
  118. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  119. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  122. metaflow-stubs/packaging_sys/utils.pyi +1 -1
  123. metaflow-stubs/packaging_sys/v1.pyi +1 -1
  124. metaflow-stubs/parameters.pyi +2 -2
  125. metaflow-stubs/plugins/__init__.pyi +11 -11
  126. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  128. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  133. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  135. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +1 -1
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +1 -1
  141. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  142. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  157. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  164. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  165. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  166. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  178. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  179. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  180. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  181. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  186. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  187. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  188. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  189. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +1 -1
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  194. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  207. metaflow-stubs/plugins/ollama/__init__.pyi +1 -1
  208. metaflow-stubs/plugins/optuna/__init__.pyi +1 -1
  209. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  210. metaflow-stubs/plugins/perimeters.pyi +1 -1
  211. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  212. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  213. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  214. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  215. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  216. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  217. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  218. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  219. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  220. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  221. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  222. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  223. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  224. metaflow-stubs/plugins/secrets/secrets_func.pyi +1 -1
  225. metaflow-stubs/plugins/secrets/secrets_spec.pyi +1 -1
  226. metaflow-stubs/plugins/secrets/utils.pyi +1 -1
  227. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  228. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  229. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  230. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  231. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  232. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  233. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  234. metaflow-stubs/profilers/__init__.pyi +1 -1
  235. metaflow-stubs/pylint_wrapper.pyi +1 -1
  236. metaflow-stubs/runner/__init__.pyi +1 -1
  237. metaflow-stubs/runner/deployer.pyi +4 -4
  238. metaflow-stubs/runner/deployer_impl.pyi +1 -1
  239. metaflow-stubs/runner/metaflow_runner.pyi +1 -1
  240. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  241. metaflow-stubs/runner/nbrun.pyi +1 -1
  242. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  243. metaflow-stubs/runner/utils.pyi +2 -2
  244. metaflow-stubs/system/__init__.pyi +1 -1
  245. metaflow-stubs/system/system_logger.pyi +1 -1
  246. metaflow-stubs/system/system_monitor.pyi +1 -1
  247. metaflow-stubs/tagging_util.pyi +1 -1
  248. metaflow-stubs/tuple_util.pyi +1 -1
  249. metaflow-stubs/user_configs/__init__.pyi +1 -1
  250. metaflow-stubs/user_configs/config_options.pyi +1 -1
  251. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  252. metaflow-stubs/user_decorators/__init__.pyi +1 -1
  253. metaflow-stubs/user_decorators/common.pyi +1 -1
  254. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  255. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  256. metaflow-stubs/user_decorators/user_flow_decorator.pyi +2 -2
  257. metaflow-stubs/user_decorators/user_step_decorator.pyi +3 -3
  258. {ob_metaflow_stubs-6.0.9.0.dist-info → ob_metaflow_stubs-6.0.9.1.dist-info}/METADATA +1 -1
  259. ob_metaflow_stubs-6.0.9.1.dist-info/RECORD +262 -0
  260. ob_metaflow_stubs-6.0.9.0.dist-info/RECORD +0 -262
  261. {ob_metaflow_stubs-6.0.9.0.dist-info → ob_metaflow_stubs-6.0.9.1.dist-info}/WHEEL +0 -0
  262. {ob_metaflow_stubs-6.0.9.0.dist-info → ob_metaflow_stubs-6.0.9.1.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.18.0.1+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-08-27T22:09:03.717615 #
+ # Generated on 2025-08-28T00:53:38.278497 #
  ######################################################################################################

  from __future__ import annotations
@@ -48,9 +48,9 @@ from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
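
The only functional change in this hunk is the position of the `requirements_txt_parser` re-export. For orientation, below is a minimal sketch of how such a parser is typically wired into a flow through Metaflow's `Config` mechanism; the flow name, file name, and exact wiring are illustrative assumptions, not something this diff specifies.

```python
# Hedged sketch: DepsFlow and requirements.txt are assumptions, not part of this diff.
from metaflow import Config, FlowSpec, step, requirements_txt_parser

class DepsFlow(FlowSpec):
    # The parser converts the raw requirements.txt text into a structured config.
    deps = Config("deps", default="requirements.txt", parser=requirements_txt_parser)

    @step
    def start(self):
        print(self.deps)  # parsed dependency specification
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DepsFlow()
```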
@@ -218,180 +218,138 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on Kubernetes.
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
+
+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- image_pull_secrets: List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
- Kubernetes image pull secrets to use when pulling container images
- in Kubernetes.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[Dict[str,str]], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
-
- security_context: Dict[str, Any], optional, default None
- Container security context. Applies to the task container. Allows the following keys:
- - privileged: bool, optional, default None
- - allow_privilege_escalation: bool, optional, default None
- - run_as_user: int, optional, default None
- - run_as_group: int, optional, default None
- - run_as_non_root: bool, optional, default None
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ cache_update_policy: str
+ Cache update policy: "auto", "force", or "never".
+ force_cache_update: bool
+ Simple override for "force" cache update policy.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ circuit_breaker_config: dict
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+ timeout_config: dict
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
  """
  ...

  @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- This decorator is used to run vllm APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @vllm(
- model="...",
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
-
- Valid model options
- -------------------
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
+ Specifies the Conda environment for the step.

- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
- If you need multiple models, you must create multiple @vllm decorators.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- model: str
- HuggingFace model identifier to be served by vLLM.
- backend: str
- Determines where and how to run the vLLM process.
- openai_api_server: bool
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
- Default is False (uses native engine).
- Set to True for backward compatibility with existing code.
- debug: bool
- Whether to turn on verbose debugging logs.
- card_refresh_interval: int
- Interval in seconds for refreshing the vLLM status card.
- Only used when openai_api_server=True.
- max_retries: int
- Maximum number of retries checking for vLLM server startup.
- Only used when openai_api_server=True.
- retry_alert_frequency: int
- Frequency of alert logs for vLLM server startup retries.
- Only used when openai_api_server=True.
- engine_args : dict
- Additional keyword arguments to pass to the vLLM engine.
- For example, `tensor_parallel_size=2`.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- A simple decorator that demonstrates using CardDecoratorInjector
- to inject a card and render simple markdown content.
+ Specifies environment variables to be set prior to the execution of a step.
+
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- A simple decorator that demonstrates using CardDecoratorInjector
- to inject a card and render simple markdown content.
+ Specifies environment variables to be set prior to the execution of a step.
+
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
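
This hunk replaces the `kubernetes`, `app_deploy`, `vllm`, and `test_append_card` stubs at these positions with `ollama`, `conda`, and `environment`. As a usage illustration only, a step combining the decorators documented above might look like the following sketch; the flow name, model choice, and pinned versions are assumptions, and only the 'local' backend is listed as supported, with the remaining `@ollama` keyword arguments assumed to have runtime defaults that the stub does not spell out.

```python
# Hedged sketch; assumes the ob-metaflow distribution that ships these decorators.
from metaflow import FlowSpec, conda, environment, ollama, step

class OllamaChatFlow(FlowSpec):
    @environment(vars={"APP_MODE": "demo"})        # plain env var injection
    @conda(packages={"requests": "2.32.3"})        # step-specific Conda packages
    @ollama(models=["llama3.2"], backend="local")  # Ollama sidecar serving the model
    @step
    def start(self):
        # While this step runs, the sidecar exposes the pulled model locally.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    OllamaChatFlow()
```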
 
@@ -476,186 +434,23 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ Internal decorator to support Fast bakery
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Enables checkpointing for a step.
-
- > Examples
-
- - Saving Checkpoints
-
- ```python
- @checkpoint
- @step
- def train(self):
- model = create_model(self.parameters, checkpoint_path = None)
- for i in range(self.epochs):
- # some training logic
- loss = model.train(self.dataset)
- if i % 10 == 0:
- model.save(
- current.checkpoint.directory,
- )
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
- # and returns a reference dictionary to the checkpoint saved in the datastore
- self.latest_checkpoint = current.checkpoint.save(
- name="epoch_checkpoint",
- metadata={
- "epoch": i,
- "loss": loss,
- }
- )
- ```
-
- - Using Loaded Checkpoints
-
- ```python
- @retry(times=3)
- @checkpoint
- @step
- def train(self):
- # Assume that the task has restarted and the previous attempt of the task
- # saved a checkpoint
- checkpoint_path = None
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
- print("Loaded checkpoint from the previous attempt")
- checkpoint_path = current.checkpoint.directory
-
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
- for i in range(self.epochs):
- ...
- ```
-
-
- Parameters
- ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
- """
- ...
-
- @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
- """
- Enables checkpointing for a step.
-
- > Examples
-
- - Saving Checkpoints
-
- ```python
- @checkpoint
- @step
- def train(self):
- model = create_model(self.parameters, checkpoint_path = None)
- for i in range(self.epochs):
- # some training logic
- loss = model.train(self.dataset)
- if i % 10 == 0:
- model.save(
- current.checkpoint.directory,
- )
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
- # and returns a reference dictionary to the checkpoint saved in the datastore
- self.latest_checkpoint = current.checkpoint.save(
- name="epoch_checkpoint",
- metadata={
- "epoch": i,
- "loss": loss,
- }
- )
- ```
-
- - Using Loaded Checkpoints
-
- ```python
- @retry(times=3)
- @checkpoint
- @step
- def train(self):
- # Assume that the task has restarted and the previous attempt of the task
- # saved a checkpoint
- checkpoint_path = None
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
- print("Loaded checkpoint from the previous attempt")
- checkpoint_path = current.checkpoint.directory
-
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
- for i in range(self.epochs):
- ...
- ```
-
-
- Parameters
- ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
- """
- ...
-
- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  S3 Proxy decorator for routing S3 requests through a local proxy service.
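
The `@checkpoint` stubs removed above carry their own usage examples, and `s3_proxy` is unchanged here apart from its position. For orientation, a sketch of attaching it follows; the integration name is a placeholder and the keyword values are assumptions rather than options this diff documents.

```python
# Hypothetical sketch; argument values are placeholders, not documented options.
from metaflow import FlowSpec, s3_proxy, step

class ProxyFlow(FlowSpec):
    @s3_proxy(integration_name="my-s3-integration", write_mode=None, debug=False)
    @step
    def start(self):
        # S3 traffic from this step is routed through the local proxy service.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProxyFlow()
```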
 
@@ -678,231 +473,351 @@ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typin
678
473
  ...
679
474
 
680
475
  @typing.overload
681
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
476
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
682
477
  """
683
- Specifies that the step will success under all circumstances.
684
-
685
- The decorator will create an optional artifact, specified by `var`, which
686
- contains the exception raised. You can use it to detect the presence
687
- of errors, indicating that all happy-path artifacts produced by the step
688
- are missing.
478
+ Specifies secrets to be retrieved and injected as environment variables prior to
479
+ the execution of a step.
689
480
 
690
481
 
691
482
  Parameters
692
483
  ----------
693
- var : str, optional, default None
694
- Name of the artifact in which to store the caught exception.
695
- If not specified, the exception is not stored.
696
- print_exception : bool, default True
697
- Determines whether or not the exception is printed to
698
- stdout when caught.
484
+ sources : List[Union[str, Dict[str, Any]]], default: []
485
+ List of secret specs, defining how the secrets are to be retrieved
486
+ role : str, optional, default: None
487
+ Role to use for fetching secrets
699
488
  """
700
489
  ...
701
490
 
702
491
  @typing.overload
703
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
492
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
704
493
  ...
705
494
 
706
495
  @typing.overload
707
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
496
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
708
497
  ...
709
498
 
710
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
499
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
711
500
  """
712
- Specifies that the step will success under all circumstances.
713
-
714
- The decorator will create an optional artifact, specified by `var`, which
715
- contains the exception raised. You can use it to detect the presence
716
- of errors, indicating that all happy-path artifacts produced by the step
717
- are missing.
501
+ Specifies secrets to be retrieved and injected as environment variables prior to
502
+ the execution of a step.
718
503
 
719
504
 
720
505
  Parameters
721
506
  ----------
722
- var : str, optional, default None
723
- Name of the artifact in which to store the caught exception.
724
- If not specified, the exception is not stored.
725
- print_exception : bool, default True
726
- Determines whether or not the exception is printed to
727
- stdout when caught.
507
+ sources : List[Union[str, Dict[str, Any]]], default: []
508
+ List of secret specs, defining how the secrets are to be retrieved
509
+ role : str, optional, default: None
510
+ Role to use for fetching secrets
728
511
  """
729
512
  ...
730
513
 
731
514
  @typing.overload
732
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
515
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
733
516
  """
734
- Specifies a timeout for your step.
735
-
736
- This decorator is useful if this step may hang indefinitely.
517
+ Specifies the number of times the task corresponding
518
+ to a step needs to be retried.
737
519
 
738
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
739
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
740
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
520
+ This decorator is useful for handling transient errors, such as networking issues.
521
+ If your task contains operations that can't be retried safely, e.g. database updates,
522
+ it is advisable to annotate it with `@retry(times=0)`.
741
523
 
742
- Note that all the values specified in parameters are added together so if you specify
743
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
524
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
525
+ decorator will execute a no-op task after all retries have been exhausted,
526
+ ensuring that the flow execution can continue.
744
527
 
745
528
 
746
529
  Parameters
747
530
  ----------
748
- seconds : int, default 0
749
- Number of seconds to wait prior to timing out.
750
- minutes : int, default 0
751
- Number of minutes to wait prior to timing out.
752
- hours : int, default 0
753
- Number of hours to wait prior to timing out.
531
+ times : int, default 3
532
+ Number of times to retry this task.
533
+ minutes_between_retries : int, default 2
534
+ Number of minutes between retries.
754
535
  """
755
536
  ...
756
537
 
757
538
  @typing.overload
758
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
539
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
759
540
  ...
760
541
 
761
542
  @typing.overload
762
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
543
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
763
544
  ...
764
545
 
765
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
546
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
766
547
  """
767
- Specifies a timeout for your step.
768
-
769
- This decorator is useful if this step may hang indefinitely.
548
+ Specifies the number of times the task corresponding
549
+ to a step needs to be retried.
770
550
 
771
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
772
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
773
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
551
+ This decorator is useful for handling transient errors, such as networking issues.
552
+ If your task contains operations that can't be retried safely, e.g. database updates,
553
+ it is advisable to annotate it with `@retry(times=0)`.
774
554
 
775
- Note that all the values specified in parameters are added together so if you specify
776
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
555
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
556
+ decorator will execute a no-op task after all retries have been exhausted,
557
+ ensuring that the flow execution can continue.
777
558
 
778
559
 
779
560
  Parameters
780
561
  ----------
781
- seconds : int, default 0
782
- Number of seconds to wait prior to timing out.
783
- minutes : int, default 0
784
- Number of minutes to wait prior to timing out.
785
- hours : int, default 0
786
- Number of hours to wait prior to timing out.
562
+ times : int, default 3
563
+ Number of times to retry this task.
564
+ minutes_between_retries : int, default 2
565
+ Number of minutes between retries.
787
566
  """
788
567
  ...
789
568
 
790
- @typing.overload
791
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
569
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
792
570
  """
793
- Specifies secrets to be retrieved and injected as environment variables prior to
794
- the execution of a step.
571
+ Specifies that this step should execute on Kubernetes.
795
572
 
796
573
 
797
574
  Parameters
798
575
  ----------
799
- sources : List[Union[str, Dict[str, Any]]], default: []
800
- List of secret specs, defining how the secrets are to be retrieved
801
- role : str, optional, default: None
802
- Role to use for fetching secrets
576
+ cpu : int, default 1
577
+ Number of CPUs required for this step. If `@resources` is
578
+ also present, the maximum value from all decorators is used.
579
+ memory : int, default 4096
580
+ Memory size (in MB) required for this step. If
581
+ `@resources` is also present, the maximum value from all decorators is
582
+ used.
583
+ disk : int, default 10240
584
+ Disk size (in MB) required for this step. If
585
+ `@resources` is also present, the maximum value from all decorators is
586
+ used.
587
+ image : str, optional, default None
588
+ Docker image to use when launching on Kubernetes. If not specified, and
589
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
590
+ not, a default Docker image mapping to the current version of Python is used.
591
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
592
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
593
+ image_pull_secrets: List[str], default []
594
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
595
+ Kubernetes image pull secrets to use when pulling container images
596
+ in Kubernetes.
597
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
598
+ Kubernetes service account to use when launching pod in Kubernetes.
599
+ secrets : List[str], optional, default None
600
+ Kubernetes secrets to use when launching pod in Kubernetes. These
601
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
602
+ in Metaflow configuration.
603
+ node_selector: Union[Dict[str,str], str], optional, default None
604
+ Kubernetes node selector(s) to apply to the pod running the task.
605
+ Can be passed in as a comma separated string of values e.g.
606
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
607
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
608
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
609
+ Kubernetes namespace to use when launching pod in Kubernetes.
610
+ gpu : int, optional, default None
611
+ Number of GPUs required for this step. A value of zero implies that
612
+ the scheduled node should not have GPUs.
613
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
614
+ The vendor of the GPUs to be used for this step.
615
+ tolerations : List[Dict[str,str]], default []
616
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
617
+ Kubernetes tolerations to use when launching pod in Kubernetes.
618
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
619
+ Kubernetes labels to use when launching pod in Kubernetes.
620
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
621
+ Kubernetes annotations to use when launching pod in Kubernetes.
622
+ use_tmpfs : bool, default False
623
+ This enables an explicit tmpfs mount for this step.
624
+ tmpfs_tempdir : bool, default True
625
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
626
+ tmpfs_size : int, optional, default: None
627
+ The value for the size (in MiB) of the tmpfs mount for this step.
628
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
629
+ memory allocated for this step.
630
+ tmpfs_path : str, optional, default /metaflow_temp
631
+ Path to tmpfs mount for this step.
632
+ persistent_volume_claims : Dict[str, str], optional, default None
633
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
634
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
635
+ shared_memory: int, optional
636
+ Shared memory size (in MiB) required for this step
637
+ port: int, optional
638
+ Port number to specify in the Kubernetes job object
639
+ compute_pool : str, optional, default None
640
+ Compute pool to be used for this step.
641
+ If not specified, any accessible compute pool within the perimeter is used.
642
+ hostname_resolution_timeout: int, default 10 * 60
643
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
644
+ Only applicable when @parallel is used.
645
+ qos: str, default: Burstable
646
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
647
+
648
+ security_context: Dict[str, Any], optional, default None
649
+ Container security context. Applies to the task container. Allows the following keys:
650
+ - privileged: bool, optional, default None
651
+ - allow_privilege_escalation: bool, optional, default None
652
+ - run_as_user: int, optional, default None
653
+ - run_as_group: int, optional, default None
654
+ - run_as_non_root: bool, optional, default None
803
655
  """
804
656
  ...
805
657
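As a rough illustration of the `@kubernetes` signature above (the resource figures are arbitrary; image and namespace fall back to the `METAFLOW_KUBERNETES_*` configuration):

```python
from metaflow import FlowSpec, kubernetes, step

class K8sExampleFlow(FlowSpec):

    # Request 2 CPUs, 8 GB of memory, and 20 GB of disk for this step.
    @kubernetes(cpu=2, memory=8192, disk=20480)
    @step
    def start(self):
        print("running inside a Kubernetes pod")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sExampleFlow()
```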
 
806
658
  @typing.overload
807
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
659
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
660
+ """
661
+ Decorator prototype for all step decorators. This function gets specialized
662
+ and imported for all decorator types by _import_plugin_decorators().
663
+ """
808
664
  ...
809
665
 
810
666
  @typing.overload
811
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
667
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
812
668
  ...
813
669
 
814
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
670
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
815
671
  """
816
- Specifies secrets to be retrieved and injected as environment variables prior to
817
- the execution of a step.
672
+ Decorator prototype for all step decorators. This function gets specialized
673
+ and imported for all decorator types by _import_plugin_decorators().
674
+ """
675
+ ...
676
+
677
+ @typing.overload
678
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
679
+ """
680
+ Creates a human-readable report, a Metaflow Card, after this step completes.
681
+
682
+ Note that you may add multiple `@card` decorators in a step with different parameters.
818
683
 
819
684
 
820
685
  Parameters
821
686
  ----------
822
- sources : List[Union[str, Dict[str, Any]]], default: []
823
- List of secret specs, defining how the secrets are to be retrieved
824
- role : str, optional, default: None
825
- Role to use for fetching secrets
687
+ type : str, default 'default'
688
+ Card type.
689
+ id : str, optional, default None
690
+ If multiple cards are present, use this id to identify this card.
691
+ options : Dict[str, Any], default {}
692
+ Options passed to the card. The contents depend on the card type.
693
+ timeout : int, default 45
694
+ Interrupt reporting if it takes more than this many seconds.
826
695
  """
827
696
  ...
828
697
 
829
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
698
+ @typing.overload
699
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
700
+ ...
701
+
702
+ @typing.overload
703
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
704
+ ...
705
+
706
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
830
707
  """
831
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
708
+ Creates a human-readable report, a Metaflow Card, after this step completes.
832
709
 
833
- User code call
834
- --------------
835
- @ollama(
836
- models=[...],
837
- ...
838
- )
710
+ Note that you may add multiple `@card` decorators in a step with different parameters.
839
711
 
840
- Valid backend options
841
- ---------------------
842
- - 'local': Run as a separate process on the local task machine.
843
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
844
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
845
712
 
846
- Valid model options
847
- -------------------
848
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
713
+ Parameters
714
+ ----------
715
+ type : str, default 'default'
716
+ Card type.
717
+ id : str, optional, default None
718
+ If multiple cards are present, use this id to identify this card.
719
+ options : Dict[str, Any], default {}
720
+ Options passed to the card. The contents depend on the card type.
721
+ timeout : int, default 45
722
+ Interrupt reporting if it takes more than this many seconds.
723
+ """
724
+ ...
725
+
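A short sketch of the `@card` usage implied by this stub; the `Markdown` component comes from `metaflow.cards`, and the id `"report"` is illustrative:

```python
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown

class CardExampleFlow(FlowSpec):

    @card(type="default", id="report", timeout=45)
    @step
    def start(self):
        # Components appended to the card are rendered after the step completes.
        current.card["report"].append(Markdown("# Training summary"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardExampleFlow()
```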
726
+ @typing.overload
727
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
728
+ """
729
+ Specifies the resources needed when executing this step.
730
+
731
+ Use `@resources` to specify the resource requirements
732
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
733
+
734
+ You can choose the compute layer on the command line by executing e.g.
735
+ ```
736
+ python myflow.py run --with batch
737
+ ```
738
+ or
739
+ ```
740
+ python myflow.py run --with kubernetes
741
+ ```
742
+ which executes the flow on the desired system using the
743
+ requirements specified in `@resources`.
849
744
 
850
745
 
851
746
  Parameters
852
747
  ----------
853
- models: list[str]
854
- List of Ollama containers running models in sidecars.
855
- backend: str
856
- Determines where and how to run the Ollama process.
857
- force_pull: bool
858
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
859
- cache_update_policy: str
860
- Cache update policy: "auto", "force", or "never".
861
- force_cache_update: bool
862
- Simple override for "force" cache update policy.
863
- debug: bool
864
- Whether to turn on verbose debugging logs.
865
- circuit_breaker_config: dict
866
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
867
- timeout_config: dict
868
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
748
+ cpu : int, default 1
749
+ Number of CPUs required for this step.
750
+ gpu : int, optional, default None
751
+ Number of GPUs required for this step.
752
+ disk : int, optional, default None
753
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
754
+ memory : int, default 4096
755
+ Memory size (in MB) required for this step.
756
+ shared_memory : int, optional, default None
757
+ The value for the size (in MiB) of the /dev/shm volume for this step.
758
+ This parameter maps to the `--shm-size` option in Docker.
869
759
  """
870
760
  ...
871
761
 
872
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
762
+ @typing.overload
763
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
764
+ ...
765
+
766
+ @typing.overload
767
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
768
+ ...
769
+
770
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
873
771
  """
874
- Specifies that this step should execute on DGX cloud.
772
+ Specifies the resources needed when executing this step.
773
+
774
+ Use `@resources` to specify the resource requirements
775
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
776
+
777
+ You can choose the compute layer on the command line by executing e.g.
778
+ ```
779
+ python myflow.py run --with batch
780
+ ```
781
+ or
782
+ ```
783
+ python myflow.py run --with kubernetes
784
+ ```
785
+ which executes the flow on the desired system using the
786
+ requirements specified in `@resources`.
875
787
 
876
788
 
877
789
  Parameters
878
790
  ----------
879
- gpu : int
880
- Number of GPUs to use.
881
- gpu_type : str
882
- Type of Nvidia GPU to use.
883
- queue_timeout : int
884
- Time to keep the job in NVCF's queue.
791
+ cpu : int, default 1
792
+ Number of CPUs required for this step.
793
+ gpu : int, optional, default None
794
+ Number of GPUs required for this step.
795
+ disk : int, optional, default None
796
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
797
+ memory : int, default 4096
798
+ Memory size (in MB) required for this step.
799
+ shared_memory : int, optional, default None
800
+ The value for the size (in MiB) of the /dev/shm volume for this step.
801
+ This parameter maps to the `--shm-size` option in Docker.
885
802
  """
886
803
  ...
887
804
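To make the compute-layer independence described above concrete, a minimal sketch (the figures are arbitrary):

```python
from metaflow import FlowSpec, resources, step

class ResourcesExampleFlow(FlowSpec):

    # The same requirements apply whether the flow runs with
    # `--with batch` or `--with kubernetes`.
    @resources(cpu=4, memory=16384, gpu=1)
    @step
    def start(self):
        self.result = sum(range(10))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ResourcesExampleFlow()
```

Running `python myflow.py run --with kubernetes` would then apply these figures to the Kubernetes pods.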
 
888
805
  @typing.overload
889
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
806
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
890
807
  """
891
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
892
- It exists to make it easier for users to know that this decorator should only be used with
893
- a Neo Cloud like Nebius.
808
+ A simple decorator that demonstrates using CardDecoratorInjector
809
+ to inject a card and render simple markdown content.
894
810
  """
895
811
  ...
896
812
 
897
813
  @typing.overload
898
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
814
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
899
815
  ...
900
816
 
901
- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
817
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
902
818
  """
903
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
904
- It exists to make it easier for users to know that this decorator should only be used with
905
- a Neo Cloud like Nebius.
819
+ A simple decorator that demonstrates using CardDecoratorInjector
820
+ to inject a card and render simple markdown content.
906
821
  """
907
822
  ...
908
823
 
@@ -927,6 +842,56 @@ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFla
927
842
  """
928
843
  ...
929
844
 
845
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
846
+ """
847
+ This decorator is used to run vLLM APIs as Metaflow task sidecars.
848
+
849
+ User code call
850
+ --------------
851
+ @vllm(
852
+ model="...",
853
+ ...
854
+ )
855
+
856
+ Valid backend options
857
+ ---------------------
858
+ - 'local': Run as a separate process on the local task machine.
859
+
860
+ Valid model options
861
+ -------------------
862
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
863
+
864
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
865
+ If you need multiple models, you must create multiple @vllm decorators.
866
+
867
+
868
+ Parameters
869
+ ----------
870
+ model: str
871
+ HuggingFace model identifier to be served by vLLM.
872
+ backend: str
873
+ Determines where and how to run the vLLM process.
874
+ openai_api_server: bool
875
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
876
+ Default is False (uses native engine).
877
+ Set to True for backward compatibility with existing code.
878
+ debug: bool
879
+ Whether to turn on verbose debugging logs.
880
+ card_refresh_interval: int
881
+ Interval in seconds for refreshing the vLLM status card.
882
+ Only used when openai_api_server=True.
883
+ max_retries: int
884
+ Maximum number of retries checking for vLLM server startup.
885
+ Only used when openai_api_server=True.
886
+ retry_alert_frequency: int
887
+ Frequency of alert logs for vLLM server startup retries.
888
+ Only used when openai_api_server=True.
889
+ engine_args : dict
890
+ Additional keyword arguments to pass to the vLLM engine.
891
+ For example, `tensor_parallel_size=2`.
892
+ """
893
+ ...
894
+
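A hedged sketch of the `@vllm` decorator added here. The stub declares every parameter as keyword-only with no visible defaults, so this example passes all of them explicitly; the model name and the numeric values are illustrative, and importing `vllm` from the top-level `metaflow` package is an assumption based on these stubs:

```python
from metaflow import FlowSpec, step, vllm  # top-level export assumed from these stubs

class VLLMExampleFlow(FlowSpec):

    # One @vllm decorator serves exactly one model, per the note above.
    @vllm(
        model="meta-llama/Llama-3.2-1B",  # illustrative HuggingFace identifier
        backend="local",
        openai_api_server=True,
        debug=False,
        card_refresh_interval=10,
        max_retries=60,
        retry_alert_frequency=10,
        engine_args={"tensor_parallel_size": 1},
    )
    @step
    def start(self):
        # While this step runs, the sidecar exposes an OpenAI-compatible
        # endpoint that the step body can query with any OpenAI client.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    VLLMExampleFlow()
```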
930
895
  @typing.overload
931
896
  def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
932
897
  """
@@ -946,6 +911,81 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
946
911
  """
947
912
  ...
948
913
 
914
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
915
+ """
916
+ Specifies that this step should execute on DGX cloud.
917
+
918
+
919
+ Parameters
920
+ ----------
921
+ gpu : int
922
+ Number of GPUs to use.
923
+ gpu_type : str
924
+ Type of Nvidia GPU to use.
925
+ queue_timeout : int
926
+ Time to keep the job in NVCF's queue.
927
+ """
928
+ ...
929
+
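For the `@nvidia` stub above, a sketch under stated assumptions: the GPU type string and the queue timeout value (presumably seconds) are illustrative guesses, not values confirmed by this diff:

```python
from metaflow import FlowSpec, nvidia, step

class DGXExampleFlow(FlowSpec):

    # Run this step on DGX cloud with a single GPU; "H100" and 3600
    # are illustrative values only.
    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DGXExampleFlow()
```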
930
+ @typing.overload
931
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
932
+ """
933
+ Specifies a timeout for your step.
934
+
935
+ This decorator is useful if this step may hang indefinitely.
936
+
937
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
938
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
939
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
940
+
941
+ Note that all the values specified in parameters are added together so if you specify
942
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
943
+
944
+
945
+ Parameters
946
+ ----------
947
+ seconds : int, default 0
948
+ Number of seconds to wait prior to timing out.
949
+ minutes : int, default 0
950
+ Number of minutes to wait prior to timing out.
951
+ hours : int, default 0
952
+ Number of hours to wait prior to timing out.
953
+ """
954
+ ...
955
+
956
+ @typing.overload
957
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
958
+ ...
959
+
960
+ @typing.overload
961
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
962
+ ...
963
+
964
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
965
+ """
966
+ Specifies a timeout for your step.
967
+
968
+ This decorator is useful if this step may hang indefinitely.
969
+
970
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
971
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
972
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
973
+
974
+ Note that all the values specified in parameters are added together so if you specify
975
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
976
+
977
+
978
+ Parameters
979
+ ----------
980
+ seconds : int, default 0
981
+ Number of seconds to wait prior to timing out.
982
+ minutes : int, default 0
983
+ Number of minutes to wait prior to timing out.
984
+ hours : int, default 0
985
+ Number of hours to wait prior to timing out.
986
+ """
987
+ ...
988
+
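Since the docstring stresses that the duration parameters are additive and that a timeout surfaces as an ordinary exception, a minimal sketch combining `@timeout` with `@retry`:

```python
import time
from metaflow import FlowSpec, retry, step, timeout

class TimeoutExampleFlow(FlowSpec):

    # 1 hour + 30 minutes add up to a 90-minute limit. A timeout raises
    # an exception, so @retry re-runs the step up to two more times.
    @retry(times=2)
    @timeout(hours=1, minutes=30)
    @step
    def start(self):
        time.sleep(1)  # stand-in for work that could hang indefinitely
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TimeoutExampleFlow()
```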
949
989
  @typing.overload
950
990
  def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
951
991
  """
@@ -1070,458 +1110,432 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1070
1110
  the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1071
1111
  If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1072
1112
 
1073
- temp_dir_root : str, default: None
1074
- The root directory under which `current.model.loaded` will store loaded models
1075
- """
1076
- ...
1077
-
1078
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1079
- """
1080
- Specifies that this step should execute on DGX cloud.
1081
-
1082
-
1083
- Parameters
1084
- ----------
1085
- gpu : int
1086
- Number of GPUs to use.
1087
- gpu_type : str
1088
- Type of Nvidia GPU to use.
1089
- """
1090
- ...
1091
-
1092
- @typing.overload
1093
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1094
- """
1095
- Specifies the Conda environment for the step.
1096
-
1097
- Information in this decorator will augment any
1098
- attributes set in the `@conda_base` flow-level decorator. Hence,
1099
- you can use `@conda_base` to set packages required by all
1100
- steps and use `@conda` to specify step-specific overrides.
1101
-
1102
-
1103
- Parameters
1104
- ----------
1105
- packages : Dict[str, str], default {}
1106
- Packages to use for this step. The key is the name of the package
1107
- and the value is the version to use.
1108
- libraries : Dict[str, str], default {}
1109
- Supported for backward compatibility. When used with packages, packages will take precedence.
1110
- python : str, optional, default None
1111
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1112
- that the version used will correspond to the version of the Python interpreter used to start the run.
1113
- disabled : bool, default False
1114
- If set to True, disables @conda.
1113
+ temp_dir_root : str, default: None
1114
+ The root directory under which `current.model.loaded` will store loaded models
1115
1115
  """
1116
1116
  ...
1117
1117
 
1118
1118
  @typing.overload
1119
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1120
- ...
1121
-
1122
- @typing.overload
1123
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1124
- ...
1125
-
1126
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1119
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1127
1120
  """
1128
- Specifies the Conda environment for the step.
1121
+ Specifies that the step will succeed under all circumstances.
1129
1122
 
1130
- Information in this decorator will augment any
1131
- attributes set in the `@conda_base` flow-level decorator. Hence,
1132
- you can use `@conda_base` to set packages required by all
1133
- steps and use `@conda` to specify step-specific overrides.
1123
+ The decorator will create an optional artifact, specified by `var`, which
1124
+ contains the exception raised. You can use it to detect the presence
1125
+ of errors, indicating that all happy-path artifacts produced by the step
1126
+ are missing.
1134
1127
 
1135
1128
 
1136
1129
  Parameters
1137
1130
  ----------
1138
- packages : Dict[str, str], default {}
1139
- Packages to use for this step. The key is the name of the package
1140
- and the value is the version to use.
1141
- libraries : Dict[str, str], default {}
1142
- Supported for backward compatibility. When used with packages, packages will take precedence.
1143
- python : str, optional, default None
1144
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1145
- that the version used will correspond to the version of the Python interpreter used to start the run.
1146
- disabled : bool, default False
1147
- If set to True, disables @conda.
1131
+ var : str, optional, default None
1132
+ Name of the artifact in which to store the caught exception.
1133
+ If not specified, the exception is not stored.
1134
+ print_exception : bool, default True
1135
+ Determines whether or not the exception is printed to
1136
+ stdout when caught.
1148
1137
  """
1149
1138
  ...
1150
1139
 
1151
1140
  @typing.overload
1152
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1153
- """
1154
- Internal decorator to support Fast bakery
1155
- """
1141
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1156
1142
  ...
1157
1143
 
1158
1144
  @typing.overload
1159
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1145
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1160
1146
  ...
1161
1147
 
1162
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1148
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1163
1149
  """
1164
- Internal decorator to support Fast bakery
1150
+ Specifies that the step will succeed under all circumstances.
1151
+
1152
+ The decorator will create an optional artifact, specified by `var`, which
1153
+ contains the exception raised. You can use it to detect the presence
1154
+ of errors, indicating that all happy-path artifacts produced by the step
1155
+ are missing.
1156
+
1157
+
1158
+ Parameters
1159
+ ----------
1160
+ var : str, optional, default None
1161
+ Name of the artifact in which to store the caught exception.
1162
+ If not specified, the exception is not stored.
1163
+ print_exception : bool, default True
1164
+ Determines whether or not the exception is printed to
1165
+ stdout when caught.
1165
1166
  """
1166
1167
  ...
1167
1168
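A minimal sketch of the `@catch` behavior documented above; the artifact name `compute_failed` is illustrative:

```python
from metaflow import FlowSpec, catch, step

class CatchExampleFlow(FlowSpec):

    # The step "succeeds" even though it raises; the exception is
    # stored in self.compute_failed instead of failing the run.
    @catch(var="compute_failed")
    @step
    def start(self):
        self.result = 1 // 0  # deliberately raises ZeroDivisionError
        self.next(self.end)

    @step
    def end(self):
        if self.compute_failed:
            print("start failed:", self.compute_failed)

if __name__ == "__main__":
    CatchExampleFlow()
```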
 
1168
1169
  @typing.overload
1169
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1170
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1170
1171
  """
1171
- Specifies the number of times the task corresponding
1172
- to a step needs to be retried.
1172
+ Enables checkpointing for a step.
1173
1173
 
1174
- This decorator is useful for handling transient errors, such as networking issues.
1175
- If your task contains operations that can't be retried safely, e.g. database updates,
1176
- it is advisable to annotate it with `@retry(times=0)`.
1174
+ > Examples
1177
1175
 
1178
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1179
- decorator will execute a no-op task after all retries have been exhausted,
1180
- ensuring that the flow execution can continue.
1176
+ - Saving Checkpoints
1177
+
1178
+ ```python
1179
+ @checkpoint
1180
+ @step
1181
+ def train(self):
1182
+ model = create_model(self.parameters, checkpoint_path = None)
1183
+ for i in range(self.epochs):
1184
+ # some training logic
1185
+ loss = model.train(self.dataset)
1186
+ if i % 10 == 0:
1187
+ model.save(
1188
+ current.checkpoint.directory,
1189
+ )
1190
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
1191
+ # and returns a reference dictionary to the checkpoint saved in the datastore
1192
+ self.latest_checkpoint = current.checkpoint.save(
1193
+ name="epoch_checkpoint",
1194
+ metadata={
1195
+ "epoch": i,
1196
+ "loss": loss,
1197
+ }
1198
+ )
1199
+ ```
1200
+
1201
+ - Using Loaded Checkpoints
1202
+
1203
+ ```python
1204
+ @retry(times=3)
1205
+ @checkpoint
1206
+ @step
1207
+ def train(self):
1208
+ # Assume that the task has restarted and the previous attempt of the task
1209
+ # saved a checkpoint
1210
+ checkpoint_path = None
1211
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1212
+ print("Loaded checkpoint from the previous attempt")
1213
+ checkpoint_path = current.checkpoint.directory
1214
+
1215
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1216
+ for i in range(self.epochs):
1217
+ ...
1218
+ ```
1181
1219
 
1182
1220
 
1183
1221
  Parameters
1184
1222
  ----------
1185
- times : int, default 3
1186
- Number of times to retry this task.
1187
- minutes_between_retries : int, default 2
1188
- Number of minutes between retries.
1223
+ load_policy : str, default: "fresh"
1224
+ The policy for loading the checkpoint. The following policies are supported:
1225
+ - "eager": Loads the the latest available checkpoint within the namespace.
1226
+ With this mode, the latest checkpoint written by any previous task of the step (possibly even from a different run)
1227
+ will be loaded at the start of the task.
1228
+ - "none": Do not load any checkpoint
1229
+ - "fresh": Loads the lastest checkpoint created within the running Task.
1230
+ This mode helps load checkpoints across retry attempts of the same task.
1231
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1232
+ created within the task will be loaded when the task retries execution on failure.
1233
+
1234
+ temp_dir_root : str, default: None
1235
+ The root directory under which `current.checkpoint.directory` will be created.
1189
1236
  """
1190
1237
  ...
1191
1238
 
1192
1239
  @typing.overload
1193
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1240
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1194
1241
  ...
1195
1242
 
1196
1243
  @typing.overload
1197
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1244
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1198
1245
  ...
1199
1246
 
1200
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1247
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
1201
1248
  """
1202
- Specifies the number of times the task corresponding
1203
- to a step needs to be retried.
1249
+ Enables checkpointing for a step.
1204
1250
 
1205
- This decorator is useful for handling transient errors, such as networking issues.
1206
- If your task contains operations that can't be retried safely, e.g. database updates,
1207
- it is advisable to annotate it with `@retry(times=0)`.
1251
+ > Examples
1208
1252
 
1209
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1210
- decorator will execute a no-op task after all retries have been exhausted,
1211
- ensuring that the flow execution can continue.
1253
+ - Saving Checkpoints
1254
+
1255
+ ```python
1256
+ @checkpoint
1257
+ @step
1258
+ def train(self):
1259
+ model = create_model(self.parameters, checkpoint_path = None)
1260
+ for i in range(self.epochs):
1261
+ # some training logic
1262
+ loss = model.train(self.dataset)
1263
+ if i % 10 == 0:
1264
+ model.save(
1265
+ current.checkpoint.directory,
1266
+ )
1267
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
1268
+ # and returns a reference dictionary to the checkpoint saved in the datastore
1269
+ self.latest_checkpoint = current.checkpoint.save(
1270
+ name="epoch_checkpoint",
1271
+ metadata={
1272
+ "epoch": i,
1273
+ "loss": loss,
1274
+ }
1275
+ )
1276
+ ```
1277
+
1278
+ - Using Loaded Checkpoints
1279
+
1280
+ ```python
1281
+ @retry(times=3)
1282
+ @checkpoint
1283
+ @step
1284
+ def train(self):
1285
+ # Assume that the task has restarted and the previous attempt of the task
1286
+ # saved a checkpoint
1287
+ checkpoint_path = None
1288
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1289
+ print("Loaded checkpoint from the previous attempt")
1290
+ checkpoint_path = current.checkpoint.directory
1291
+
1292
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1293
+ for i in range(self.epochs):
1294
+ ...
1295
+ ```
1212
1296
 
1213
1297
 
1214
1298
  Parameters
1215
1299
  ----------
1216
- times : int, default 3
1217
- Number of times to retry this task.
1218
- minutes_between_retries : int, default 2
1219
- Number of minutes between retries.
1300
+ load_policy : str, default: "fresh"
1301
+ The policy for loading the checkpoint. The following policies are supported:
1302
+ - "eager": Loads the the latest available checkpoint within the namespace.
1303
+ With this mode, the latest checkpoint written by any previous task of the step (possibly even from a different run)
1304
+ will be loaded at the start of the task.
1305
+ - "none": Do not load any checkpoint
1306
+ - "fresh": Loads the lastest checkpoint created within the running Task.
1307
+ This mode helps load checkpoints across retry attempts of the same task.
1308
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1309
+ created within the task will be loaded when the task retries execution on failure.
1310
+
1311
+ temp_dir_root : str, default: None
1312
+ The root directory under which `current.checkpoint.directory` will be created.
1220
1313
  """
1221
1314
  ...
1222
1315
 
1223
1316
  @typing.overload
1224
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1317
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1225
1318
  """
1226
- Creates a human-readable report, a Metaflow Card, after this step completes.
1227
-
1228
- Note that you may add multiple `@card` decorators in a step with different parameters.
1229
-
1230
-
1231
- Parameters
1232
- ----------
1233
- type : str, default 'default'
1234
- Card type.
1235
- id : str, optional, default None
1236
- If multiple cards are present, use this id to identify this card.
1237
- options : Dict[str, Any], default {}
1238
- Options passed to the card. The contents depend on the card type.
1239
- timeout : int, default 45
1240
- Interrupt reporting if it takes more than this many seconds.
1319
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1320
+ It exists to make it easier for users to know that this decorator should only be used with
1321
+ a Neo Cloud like Nebius.
1241
1322
  """
1242
1323
  ...
1243
1324
 
1244
1325
  @typing.overload
1245
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1326
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1246
1327
  ...
1247
1328
 
1248
- @typing.overload
1249
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1329
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1330
+ """
1331
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1332
+ It exists to make it easier for users to know that this decorator should only be used with
1333
+ a Neo Cloud like Nebius.
1334
+ """
1250
1335
  ...
1251
1336
 
1252
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1337
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1253
1338
  """
1254
- Creates a human-readable report, a Metaflow Card, after this step completes.
1255
-
1256
- Note that you may add multiple `@card` decorators in a step with different parameters.
1339
+ Specifies that this step should execute on DGX cloud.
1257
1340
 
1258
1341
 
1259
1342
  Parameters
1260
1343
  ----------
1261
- type : str, default 'default'
1262
- Card type.
1263
- id : str, optional, default None
1264
- If multiple cards are present, use this id to identify this card.
1265
- options : Dict[str, Any], default {}
1266
- Options passed to the card. The contents depend on the card type.
1267
- timeout : int, default 45
1268
- Interrupt reporting if it takes more than this many seconds.
1344
+ gpu : int
1345
+ Number of GPUs to use.
1346
+ gpu_type : str
1347
+ Type of Nvidia GPU to use.
1269
1348
  """
1270
1349
  ...
1271
1350
 
1272
1351
  @typing.overload
1273
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1352
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1274
1353
  """
1275
- Specifies the resources needed when executing this step.
1276
-
1277
- Use `@resources` to specify the resource requirements
1278
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1279
-
1280
- You can choose the compute layer on the command line by executing e.g.
1281
- ```
1282
- python myflow.py run --with batch
1283
- ```
1284
- or
1285
- ```
1286
- python myflow.py run --with kubernetes
1287
- ```
1288
- which executes the flow on the desired system using the
1289
- requirements specified in `@resources`.
1354
+ Specifies the PyPI packages for all steps of the flow.
1290
1355
 
1356
+ Use `@pypi_base` to set common packages required by all
1357
+ steps and use `@pypi` to specify step-specific overrides.
1291
1358
 
1292
1359
  Parameters
1293
1360
  ----------
1294
- cpu : int, default 1
1295
- Number of CPUs required for this step.
1296
- gpu : int, optional, default None
1297
- Number of GPUs required for this step.
1298
- disk : int, optional, default None
1299
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1300
- memory : int, default 4096
1301
- Memory size (in MB) required for this step.
1302
- shared_memory : int, optional, default None
1303
- The value for the size (in MiB) of the /dev/shm volume for this step.
1304
- This parameter maps to the `--shm-size` option in Docker.
1361
+ packages : Dict[str, str], default: {}
1362
+ Packages to use for this flow. The key is the name of the package
1363
+ and the value is the version to use.
1364
+ python : str, optional, default: None
1365
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1366
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1305
1367
  """
1306
1368
  ...
1307
1369
 
1308
1370
  @typing.overload
1309
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1310
- ...
1311
-
1312
- @typing.overload
1313
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1371
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1314
1372
  ...
1315
1373
 
1316
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1374
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1317
1375
  """
1318
- Specifies the resources needed when executing this step.
1319
-
1320
- Use `@resources` to specify the resource requirements
1321
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1322
-
1323
- You can choose the compute layer on the command line by executing e.g.
1324
- ```
1325
- python myflow.py run --with batch
1326
- ```
1327
- or
1328
- ```
1329
- python myflow.py run --with kubernetes
1330
- ```
1331
- which executes the flow on the desired system using the
1332
- requirements specified in `@resources`.
1376
+ Specifies the PyPI packages for all steps of the flow.
1333
1377
 
1378
+ Use `@pypi_base` to set common packages required by all
1379
+ steps and use `@pypi` to specify step-specific overrides.
1334
1380
 
1335
1381
  Parameters
1336
1382
  ----------
1337
- cpu : int, default 1
1338
- Number of CPUs required for this step.
1339
- gpu : int, optional, default None
1340
- Number of GPUs required for this step.
1341
- disk : int, optional, default None
1342
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1343
- memory : int, default 4096
1344
- Memory size (in MB) required for this step.
1345
- shared_memory : int, optional, default None
1346
- The value for the size (in MiB) of the /dev/shm volume for this step.
1347
- This parameter maps to the `--shm-size` option in Docker.
1383
+ packages : Dict[str, str], default: {}
1384
+ Packages to use for this flow. The key is the name of the package
1385
+ and the value is the version to use.
1386
+ python : str, optional, default: None
1387
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1388
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1348
1389
  """
1349
1390
  ...
1350
1391
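A minimal sketch of the flow-level `@pypi_base` decorator; the package pin and Python version are illustrative:

```python
from metaflow import FlowSpec, pypi_base, step

# Pin common dependencies once for every step of the flow; step-level
# @pypi decorators can still override individual packages.
@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.5")
class PypiExampleFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        self.rows = len(pd.DataFrame({"a": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiExampleFlow()
```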
 
1351
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1392
+ @typing.overload
1393
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1352
1394
  """
1353
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1354
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1355
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1356
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1357
- starts only after all sensors finish.
1395
+ Specifies the times when the flow should be run when running on a
1396
+ production scheduler.
1358
1397
 
1359
1398
 
1360
1399
  Parameters
1361
1400
  ----------
1362
- timeout : int
1363
- Time, in seconds before the task times out and fails. (Default: 3600)
1364
- poke_interval : int
1365
- Time in seconds that the job should wait in between each try. (Default: 60)
1366
- mode : str
1367
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1368
- exponential_backoff : bool
1369
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1370
- pool : str
1371
- the slot pool this task should run in,
1372
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1373
- soft_fail : bool
1374
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1375
- name : str
1376
- Name of the sensor on Airflow
1377
- description : str
1378
- Description of sensor in the Airflow UI
1379
- bucket_key : Union[str, List[str]]
1380
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1381
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1382
- bucket_name : str
1383
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1384
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1385
- wildcard_match : bool
1386
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1387
- aws_conn_id : str
1388
- a reference to the s3 connection on Airflow. (Default: None)
1389
- verify : bool
1390
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1401
+ hourly : bool, default False
1402
+ Run the workflow hourly.
1403
+ daily : bool, default True
1404
+ Run the workflow daily.
1405
+ weekly : bool, default False
1406
+ Run the workflow weekly.
1407
+ cron : str, optional, default None
1408
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1409
+ specified by this expression.
1410
+ timezone : str, optional, default None
1411
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1412
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1391
1413
  """
1392
1414
  ...
1393
1415
 
1394
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1416
+ @typing.overload
1417
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1418
+ ...
1419
+
1420
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1395
1421
  """
1396
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1397
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1422
+ Specifies the times when the flow should be run when running on a
1423
+ production scheduler.
1398
1424
 
1399
1425
 
1400
1426
  Parameters
1401
1427
  ----------
1402
- timeout : int
1403
- Time, in seconds before the task times out and fails. (Default: 3600)
1404
- poke_interval : int
1405
- Time in seconds that the job should wait in between each try. (Default: 60)
1406
- mode : str
1407
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1408
- exponential_backoff : bool
1409
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1410
- pool : str
1411
- the slot pool this task should run in,
1412
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1413
- soft_fail : bool
1414
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1415
- name : str
1416
- Name of the sensor on Airflow
1417
- description : str
1418
- Description of sensor in the Airflow UI
1419
- external_dag_id : str
1420
- The dag_id that contains the task you want to wait for.
1421
- external_task_ids : List[str]
1422
- The list of task_ids that you want to wait for.
1423
- If None (default value) the sensor waits for the DAG. (Default: None)
1424
- allowed_states : List[str]
1425
- Iterable of allowed states, (Default: ['success'])
1426
- failed_states : List[str]
1427
- Iterable of failed or dis-allowed states. (Default: None)
1428
- execution_delta : datetime.timedelta
1429
- time difference with the previous execution to look at,
1430
- the default is the same logical date as the current task or DAG. (Default: None)
1431
- check_existence: bool
1432
- Set to True to check if the external task exists or check if
1433
- the DAG to wait for exists. (Default: True)
1428
+ hourly : bool, default False
1429
+ Run the workflow hourly.
1430
+ daily : bool, default True
1431
+ Run the workflow daily.
1432
+ weekly : bool, default False
1433
+ Run the workflow weekly.
1434
+ cron : str, optional, default None
1435
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1436
+ specified by this expression.
1437
+ timezone : str, optional, default None
1438
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1439
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1434
1440
  """
1435
1441
  ...
1436
1442
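A minimal sketch of `@schedule`; the cron expression is illustrative (its exact field format depends on the backend scheduler), and the decorator only takes effect once the flow is deployed to a production scheduler:

```python
from metaflow import FlowSpec, schedule, step

# Run every day at 06:00 (scheduler time) when deployed; a local
# `run` behaves as usual and the schedule is ignored.
@schedule(cron="0 6 * * *")
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```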
 
  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
-                  {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+

  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
-     Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
-     Event dependencies for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+     Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+     Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
      Backend-specific configuration for tuning eventing behavior.
  """
  ...
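As a usage sketch of `@trigger_on_finish` as documented above (the flow name `FooFlow` mirrors the docstring's example; everything else is hypothetical):

```
from metaflow import FlowSpec, step, trigger_on_finish

# When both flows are deployed, ReportFlow starts each time a FooFlow
# run in the same namespace completes successfully.
@trigger_on_finish(flow='FooFlow')
class ReportFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    ReportFlow()
```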
 
  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
-                  {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+

  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
-     Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
-     Event dependencies for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+     Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+     Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
      Backend-specific configuration for tuning eventing behavior.
  """
@@ -1613,44 +1627,46 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
  """
  ...

- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
-     Packages to use for this flow. The key is the name of the package
-     and the value is the version to use.
- python : str, optional, default: None
-     Version of Python to use, e.g. '3.7.4'. A default value of None implies
-     that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator ensures that the `start` step
+ starts only after all sensors finish.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
-     Packages to use for this flow. The key is the name of the package
-     and the value is the version to use.
- python : str, optional, default: None
-     Version of Python to use, e.g. '3.7.4'. A default value of None implies
-     that the version used will correspond to the version of the Python interpreter used to start the run.
+ timeout : int
+     Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+     Time, in seconds, that the job should wait in between each try. (Default: 60)
+ mode : str
+     How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+     Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+     The slot pool this task should run in;
+     slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+     Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+     Name of the sensor on Airflow.
+ description : str
+     Description of the sensor in the Airflow UI.
+ bucket_key : Union[str, List[str]]
+     The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
+     When specified as a full s3:// URL, please leave `bucket_name` as None.
+ bucket_name : str
+     Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+     When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+     Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+     A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+     Whether or not to verify SSL certificates for the S3 connection. (Default: None)
  """
  ...
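A hedged sketch of the `@airflow_s3_key_sensor` decorator added above; the bucket and key are hypothetical, omitted parameters fall back to the defaults listed in the docstring, and the sensor only takes effect in DAGs produced by `airflow create`:

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Hypothetical: hold the DAG's start step until the given key exists.
@airflow_s3_key_sensor(bucket_key='s3://my-bucket/incoming/data.csv',
                       mode='reschedule')
class IngestFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    IngestFlow()
```

Compiling with something like `python ingest_flow.py airflow create ingest_dag.py` would then emit the sensor ahead of the start task, per the docstring's description.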
 
@@ -1769,154 +1785,138 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
  ...

  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.

- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+                  {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
  ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
-     Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
-     Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+     Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+     Event dependencies for this flow.
  options : Dict[str, Any], default {}
      Backend-specific configuration for tuning eventing behavior.
  """
  ...
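As a minimal usage sketch of the event form restored above (the event name `foo` comes straight from the docstring; the flow itself is hypothetical):

```
from metaflow import FlowSpec, step, trigger

# Once deployed, this flow starts whenever an event named 'foo'
# is published to the configured event backend.
@trigger(event='foo')
class EventDrivenFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    EventDrivenFlow()
```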
 
  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.

- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+                  {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
  ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
-     Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
-     Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+     Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+     Event dependencies for this flow.
  options : Dict[str, Any], default {}
      Backend-specific configuration for tuning eventing behavior.
  """
  ...
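And a sketch of the parameter-mapping form, pairing the docstring's mapping syntax with a flow-level `Parameter`; the event name `data_ready` and the payload field `path` are hypothetical:

```
from metaflow import FlowSpec, Parameter, step, trigger

# Map the 'path' field of the event payload onto the flow's
# input_path parameter whenever the event fires.
@trigger(event={'name': 'data_ready', 'parameters': {'input_path': 'path'}})
class MappedFlow(FlowSpec):
    input_path = Parameter('input_path', default='')

    @step
    def start(self):
        print('processing', self.input_path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    MappedFlow()
```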
 
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
-     Run the workflow hourly.
- daily : bool, default True
-     Run the workflow daily.
- weekly : bool, default False
-     Run the workflow weekly.
- cron : str, optional, default None
-     Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-     specified by this expression.
- timezone : str, optional, default None
-     Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-     which accept timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator ensures that the `start` step starts only after all sensors finish.


  Parameters
  ----------
- hourly : bool, default False
-     Run the workflow hourly.
- daily : bool, default True
-     Run the workflow daily.
- weekly : bool, default False
-     Run the workflow weekly.
- cron : str, optional, default None
-     Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-     specified by this expression.
- timezone : str, optional, default None
-     Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-     which accept timezones in [IANA format](https://nodatime.org/TimeZones).
+ timeout : int
+     Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+     Time, in seconds, that the job should wait in between each try. (Default: 60)
+ mode : str
+     How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+     Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+     The slot pool this task should run in;
+     slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+     Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+     Name of the sensor on Airflow.
+ description : str
+     Description of the sensor in the Airflow UI.
+ external_dag_id : str
+     The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+     The list of task_ids that you want to wait for.
+     If None (default value), the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+     Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+     Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+     Time difference with the previous execution to look at;
+     the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+     Set to True to check whether the external task exists or whether
+     the DAG to wait for exists. (Default: True)
  """
  ...
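Finally, a hedged sketch of the `@airflow_external_task_sensor` decorator that replaces `@schedule` in this hunk; the upstream DAG and task ids are hypothetical, and omitted parameters fall back to the docstring defaults:

```
from metaflow import FlowSpec, airflow_external_task_sensor, step

# Hypothetical: wait for the 'load_tables' task of an upstream Airflow
# DAG before start runs; meaningful only in `airflow create` output.
@airflow_external_task_sensor(external_dag_id='upstream_etl',
                              external_task_ids=['load_tables'])
class WarehouseFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    WarehouseFlow()
```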