ob-metaflow-stubs 6.0.10.15__py2.py3-none-any.whl → 6.0.10.16__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ob-metaflow-stubs might be problematic.

Files changed (266)
  1. metaflow-stubs/__init__.pyi +955 -955
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +54 -54
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +3 -3
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +3 -3
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/cards/__init__.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/cards/hf_hub_card.pyi +4 -4
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +3 -3
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  64. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  65. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +3 -3
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +3 -3
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +4 -4
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +4 -4
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -5
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +4 -4
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +3 -3
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +3 -3
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +4 -4
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +4 -4
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +3 -3
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  113. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  116. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  117. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  118. metaflow-stubs/multicore_utils.pyi +2 -2
  119. metaflow-stubs/ob_internal.pyi +2 -2
  120. metaflow-stubs/packaging_sys/__init__.pyi +7 -7
  121. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  122. metaflow-stubs/packaging_sys/distribution_support.pyi +5 -5
  123. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  124. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  125. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  126. metaflow-stubs/parameters.pyi +4 -4
  127. metaflow-stubs/plugins/__init__.pyi +15 -15
  128. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  134. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  135. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  137. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  138. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  139. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  140. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  141. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  142. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  143. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  144. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  145. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  146. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  148. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  149. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  150. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  154. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  157. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  158. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  159. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  161. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  162. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  163. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  164. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  165. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  166. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  171. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/basic.pyi +5 -3
  173. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  175. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +3 -3
  176. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +3 -3
  177. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  178. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  179. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  180. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  181. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  182. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  184. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  186. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  187. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  188. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  189. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  190. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  191. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  193. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  194. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  195. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  196. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  200. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  201. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  202. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  207. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  208. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  209. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  210. metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
  211. metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
  212. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  213. metaflow-stubs/plugins/parsers.pyi +2 -2
  214. metaflow-stubs/plugins/perimeters.pyi +2 -2
  215. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  218. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  219. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  220. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  221. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  222. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  223. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  224. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  227. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  228. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  229. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  230. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  231. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  233. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  234. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  235. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  236. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  237. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  238. metaflow-stubs/profilers/__init__.pyi +2 -2
  239. metaflow-stubs/pylint_wrapper.pyi +2 -2
  240. metaflow-stubs/runner/__init__.pyi +2 -2
  241. metaflow-stubs/runner/deployer.pyi +34 -34
  242. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  243. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  244. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  245. metaflow-stubs/runner/nbrun.pyi +2 -2
  246. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  247. metaflow-stubs/runner/utils.pyi +2 -2
  248. metaflow-stubs/system/__init__.pyi +2 -2
  249. metaflow-stubs/system/system_logger.pyi +3 -3
  250. metaflow-stubs/system/system_monitor.pyi +2 -2
  251. metaflow-stubs/tagging_util.pyi +2 -2
  252. metaflow-stubs/tuple_util.pyi +2 -2
  253. metaflow-stubs/user_configs/__init__.pyi +2 -2
  254. metaflow-stubs/user_configs/config_options.pyi +3 -3
  255. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  256. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  257. metaflow-stubs/user_decorators/common.pyi +2 -2
  258. metaflow-stubs/user_decorators/mutable_flow.pyi +6 -6
  259. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  260. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  261. metaflow-stubs/user_decorators/user_step_decorator.pyi +7 -7
  262. {ob_metaflow_stubs-6.0.10.15.dist-info → ob_metaflow_stubs-6.0.10.16.dist-info}/METADATA +1 -1
  263. ob_metaflow_stubs-6.0.10.16.dist-info/RECORD +266 -0
  264. ob_metaflow_stubs-6.0.10.15.dist-info/RECORD +0 -266
  265. {ob_metaflow_stubs-6.0.10.15.dist-info → ob_metaflow_stubs-6.0.10.16.dist-info}/WHEEL +0 -0
  266. {ob_metaflow_stubs-6.0.10.15.dist-info → ob_metaflow_stubs-6.0.10.16.dist-info}/top_level.txt +0 -0
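For readers who want to reproduce a report like the one below locally, here is a minimal sketch that diffs the main stub file between the two wheels using only the Python standard library. The local wheel filenames and the member path are assumptions based on this release pair and the file list above; adjust them to wherever the wheels were downloaded.

```python
# Minimal sketch: diff one stub file between the two wheel releases.
# Wheel archives are plain zip files, so zipfile + difflib is enough.
import difflib
import zipfile

OLD_WHEEL = "ob_metaflow_stubs-6.0.10.15-py2.py3-none-any.whl"  # assumed local filename
NEW_WHEEL = "ob_metaflow_stubs-6.0.10.16-py2.py3-none-any.whl"  # assumed local filename
MEMBER = "metaflow-stubs/__init__.pyi"                          # the file diffed below

def read_member(wheel_path, member):
    """Return the decoded lines of a single file stored inside a wheel."""
    with zipfile.ZipFile(wheel_path) as wheel:
        return wheel.read(member).decode("utf-8").splitlines(keepends=True)

diff = difflib.unified_diff(
    read_member(OLD_WHEEL, MEMBER),
    read_member(NEW_WHEEL, MEMBER),
    fromfile=f"{OLD_WHEEL}/{MEMBER}",
    tofile=f"{NEW_WHEEL}/{MEMBER}",
)
print("".join(diff))
```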
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.18.10.1+obcheckpoint(0.2.8);ob(v1) #
- # Generated on 2025-10-09T09:15:42.339267 #
+ # MF version: 2.18.11.1+obcheckpoint(0.2.8);ob(v1) #
+ # Generated on 2025-10-13T07:07:26.927215 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -39,18 +39,18 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
- from . import metaflow_git as metaflow_git
  from . import cards as cards
  from . import tuple_util as tuple_util
+ from . import metaflow_git as metaflow_git
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.parsers import yaml_parser as yaml_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from . import client as client
  from .client.core import namespace as namespace
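The reordered imports above are the dependency-file parsers exported at the package top level (requirements_txt_parser, conda_environment_yml_parser, pyproject_toml_parser, yaml_parser). As a hedged sketch of how such a parser is typically wired into a flow through Config, the snippet below is illustrative only: the flow name, config name, and requirements.txt path are placeholders, not part of this release.

```python
# Illustrative sketch only: wires requirements_txt_parser into a Config value.
from metaflow import Config, FlowSpec, step, requirements_txt_parser

class DependencyConfigFlow(FlowSpec):
    # The parser converts the raw requirements.txt text into a structured Config value.
    deps = Config("deps", default="requirements.txt", parser=requirements_txt_parser)

    @step
    def start(self):
        # Inspect whatever structure the parser produced for this config.
        print(self.deps)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DependencyConfigFlow()
```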
@@ -169,141 +169,53 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ This decorator is used to run vllm APIs as Metaflow task sidecars.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ User code call
+ --------------
+ @vllm(
+ model="...",
+ ...
+ )

+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.

- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
+ Valid model options
+ -------------------
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
+ If you need multiple models, you must create multiple @vllm decorators.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ model: str
+ HuggingFace model identifier to be served by vLLM.
+ backend: str
+ Determines where and how to run the vLLM process.
+ openai_api_server: bool
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
+ Default is False (uses native engine).
+ Set to True for backward compatibility with existing code.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ card_refresh_interval: int
+ Interval in seconds for refreshing the vLLM status card.
+ Only used when openai_api_server=True.
+ max_retries: int
+ Maximum number of retries checking for vLLM server startup.
+ Only used when openai_api_server=True.
+ retry_alert_frequency: int
+ Frequency of alert logs for vLLM server startup retries.
+ Only used when openai_api_server=True.
+ engine_args : dict
+ Additional keyword arguments to pass to the vLLM engine.
+ For example, `tensor_parallel_size=2`.
  """
  ...

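The newly added `@vllm` stub above documents the decorator's parameters; a hedged usage sketch follows. The model name, resource sizes, and engine_args are placeholders, and the sketch assumes the runtime decorator supplies defaults for the keyword arguments not passed here (the stub signature lists them all, while the docstring describes defaults such as openai_api_server=False).

```python
# Illustrative sketch only: decorator arguments are placeholders and assume the
# runtime @vllm decorator provides defaults for parameters not shown here.
from metaflow import FlowSpec, step, kubernetes, vllm

class VllmSidecarFlow(FlowSpec):

    @kubernetes(gpu=1, memory=32768)  # assumption: the vLLM sidecar runs on a GPU-backed task
    @vllm(
        model="meta-llama/Llama-3.2-1B",          # one model per @vllm, per the note above
        backend="local",                          # the only backend documented in this stub
        openai_api_server=True,                   # serve via the OpenAI-compatible subprocess mode
        engine_args={"tensor_parallel_size": 1},  # forwarded to the vLLM engine
    )
    @step
    def generate(self):
        # With openai_api_server=True, an OpenAI-compatible server runs alongside
        # this task for the duration of the step.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    VllmSidecarFlow()
```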
@@ -409,155 +321,111 @@ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typin
  """
  ...

- @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- """
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Specifies that this step should execute on Kubernetes.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- """
- ...
-
- def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
- It exists to make it easier for users to know that this decorator should only be used with
- a Neo Cloud like Nebius. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
-
-
- Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
- for S3 read and write requests.
-
- This decorator requires an integration in the Outerbounds platform that
- points to an external bucket. It affects S3 operations performed via
- Metaflow's `get_aws_client` and `S3` within a `@step`.
-
- Read operations
- ---------------
- All read operations pass through the proxy. If an object does not already
- exist in the external bucket, it is cached there. For example, if code reads
- from buckets `FOO` and `BAR` using the `S3` interface, objects from both
- buckets are cached in the external bucket.
-
- During task execution, all S3‑related read requests are routed through the
- proxy:
- - If the object is present in the external object store, the proxy
- streams it directly from there without accessing the requested origin
- bucket.
- - If the object is not present in the external storage, the proxy
- fetches it from the requested bucket, caches it in the external
- storage, and streams the response from the origin bucket.
-
- Warning
- -------
- All READ operations (e.g., GetObject, HeadObject) pass through the external
- bucket regardless of the bucket specified in user code. Even
- `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
- external bucket cache.
-
- Write operations
- ----------------
- Write behavior is controlled by the `write_mode` parameter, which determines
- whether writes also persist objects in the cache.
-
- `write_mode` values:
- - `origin-and-cache`: objects are written both to the cache and to their
- intended origin bucket.
- - `origin`: objects are written only to their intended origin bucket.
-
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ image_pull_secrets: List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+ Kubernetes image pull secrets to use when pulling container images
+ in Kubernetes.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[Dict[str,str]], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort

- Parameters
- ----------
- integration_name : str, optional
- [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
- that holds the configuration for the external, S3‑compatible object
- storage bucket. If not specified, the only available S3 proxy
- integration in the namespace is used (fails if multiple exist).
- write_mode : str, optional
- Controls whether writes also go to the external bucket.
- - `origin` (default)
- - `origin-and-cache`
- debug : bool, optional
- Enables debug logging for proxy operations.
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

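The `@kubernetes` stub added above documents its parameters in detail; the sketch below shows one way the decorator could be applied to a step. All resource sizes, selectors, and tolerations are placeholders chosen for illustration, not values recommended by this release.

```python
# Illustrative sketch only: resource sizes, selectors, and tolerations are placeholders.
from metaflow import FlowSpec, step, kubernetes

class KubernetesExampleFlow(FlowSpec):

    @kubernetes(
        cpu=2,
        memory=8192,
        disk=20480,
        gpu=1,                                     # 0 would request a node without GPUs
        node_selector="kubernetes.io/arch=amd64",  # comma-separated string or dict, per the docstring
        tolerations=[{"key": "nvidia.com/gpu", "operator": "Exists", "effect": "NoSchedule"}],
        qos="Burstable",
    )
    @step
    def train(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    KubernetesExampleFlow()
```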
@@ -678,7 +546,88 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope:
  """
  ...

- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+ It exists to make it easier for users to know that this decorator should only be used with
+ a Neo Cloud like Nebius. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
+
+
+ Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
+ for S3 read and write requests.
+
+ This decorator requires an integration in the Outerbounds platform that
+ points to an external bucket. It affects S3 operations performed via
+ Metaflow's `get_aws_client` and `S3` within a `@step`.
+
+ Read operations
+ ---------------
+ All read operations pass through the proxy. If an object does not already
+ exist in the external bucket, it is cached there. For example, if code reads
+ from buckets `FOO` and `BAR` using the `S3` interface, objects from both
+ buckets are cached in the external bucket.
+
+ During task execution, all S3‑related read requests are routed through the
+ proxy:
+ - If the object is present in the external object store, the proxy
+ streams it directly from there without accessing the requested origin
+ bucket.
+ - If the object is not present in the external storage, the proxy
+ fetches it from the requested bucket, caches it in the external
+ storage, and streams the response from the origin bucket.
+
+ Warning
+ -------
+ All READ operations (e.g., GetObject, HeadObject) pass through the external
+ bucket regardless of the bucket specified in user code. Even
+ `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
+ external bucket cache.
+
+ Write operations
+ ----------------
+ Write behavior is controlled by the `write_mode` parameter, which determines
+ whether writes also persist objects in the cache.
+
+ `write_mode` values:
+ - `origin-and-cache`: objects are written both to the cache and to their
+ intended origin bucket.
+ - `origin`: objects are written only to their intended origin bucket.
+
+
+ Parameters
+ ----------
+ integration_name : str, optional
+ [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
+ that holds the configuration for the external, S3‑compatible object
+ storage bucket. If not specified, the only available S3 proxy
+ integration in the namespace is used (fails if multiple exist).
+ write_mode : str, optional
+ Controls whether writes also go to the external bucket.
+ - `origin` (default)
+ - `origin-and-cache`
+ debug : bool, optional
+ Enables debug logging for proxy operations.
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on DGX cloud.

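Since `@nebius_s3_proxy` now appears in this stub only as an addition, a hedged usage sketch is included here. The integration name and bucket path are hypothetical, and the sketch only restates behavior described in the docstring above: reads via Metaflow's S3 client go through the proxy cache, and write_mode controls whether writes are also cached.

```python
# Illustrative sketch only: the integration name and bucket are placeholders.
from metaflow import FlowSpec, S3, step, nebius_s3_proxy

class NebiusProxyFlow(FlowSpec):

    @nebius_s3_proxy(
        integration_name="my-nebius-cache",  # hypothetical Outerbounds integration
        write_mode="origin-and-cache",       # also persist writes in the cache bucket
    )
    @step
    def pull(self):
        # Reads issued through the Metaflow S3 client are routed via the proxy and
        # cached in the external bucket configured by the integration.
        with S3(s3root="s3://my-origin-bucket/data/") as s3:
            objects = s3.get_all()
            print(f"fetched (and cached) {len(objects)} objects")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NebiusProxyFlow()
```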
@@ -689,30 +638,280 @@ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[t
689
638
  Number of GPUs to use.
690
639
  gpu_type : str
691
640
  Type of Nvidia GPU to use.
692
- queue_timeout : int
693
- Time to keep the job in NVCF's queue.
694
641
  """
695
642
  ...
696
643
 
697
644
  @typing.overload
698
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
645
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
699
646
  """
700
- Enables checkpointing for a step.
647
+ Enables loading / saving of models within a step.
701
648
 
702
649
  > Examples
650
+ - Saving Models
651
+ ```python
652
+ @model
653
+ @step
654
+ def train(self):
655
+ # current.model.save returns a dictionary reference to the model saved
656
+ self.my_model = current.model.save(
657
+ path_to_my_model,
658
+ label="my_model",
659
+ metadata={
660
+ "epochs": 10,
661
+ "batch-size": 32,
662
+ "learning-rate": 0.001,
663
+ }
664
+ )
665
+ self.next(self.test)
703
666
 
704
- - Saving Checkpoints
667
+ @model(load="my_model")
668
+ @step
669
+ def test(self):
670
+ # `current.model.loaded` returns a dictionary of the loaded models
671
+ # where the key is the name of the artifact and the value is the path to the model
672
+ print(os.listdir(current.model.loaded["my_model"]))
673
+ self.next(self.end)
674
+ ```
705
675
 
676
+ - Loading models
706
677
  ```python
707
- @checkpoint
708
678
  @step
709
679
  def train(self):
710
- model = create_model(self.parameters, checkpoint_path = None)
711
- for i in range(self.epochs):
712
- # some training logic
713
- loss = model.train(self.dataset)
714
- if i % 10 == 0:
715
- model.save(
680
+ # current.model.load returns the path to the model loaded
681
+ checkpoint_path = current.model.load(
682
+ self.checkpoint_key,
683
+ )
684
+ model_path = current.model.load(
685
+ self.model,
686
+ )
687
+ self.next(self.test)
688
+ ```
689
+
690
+
691
+ Parameters
692
+ ----------
693
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
694
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
695
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
696
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
697
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
698
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
699
+
700
+ temp_dir_root : str, default: None
701
+ The root directory under which `current.model.loaded` will store loaded models
702
+ """
703
+ ...
704
+
705
+ @typing.overload
706
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
707
+ ...
708
+
709
+ @typing.overload
710
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
711
+ ...
712
+
713
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
714
+ """
715
+ Enables loading / saving of models within a step.
716
+
717
+ > Examples
718
+ - Saving Models
719
+ ```python
720
+ @model
721
+ @step
722
+ def train(self):
723
+ # current.model.save returns a dictionary reference to the model saved
724
+ self.my_model = current.model.save(
725
+ path_to_my_model,
726
+ label="my_model",
727
+ metadata={
728
+ "epochs": 10,
729
+ "batch-size": 32,
730
+ "learning-rate": 0.001,
731
+ }
732
+ )
733
+ self.next(self.test)
734
+
735
+ @model(load="my_model")
736
+ @step
737
+ def test(self):
738
+ # `current.model.loaded` returns a dictionary of the loaded models
739
+ # where the key is the name of the artifact and the value is the path to the model
740
+ print(os.listdir(current.model.loaded["my_model"]))
741
+ self.next(self.end)
742
+ ```
743
+
744
+ - Loading models
745
+ ```python
746
+ @step
747
+ def train(self):
748
+ # current.model.load returns the path to the model loaded
749
+ checkpoint_path = current.model.load(
750
+ self.checkpoint_key,
751
+ )
752
+ model_path = current.model.load(
753
+ self.model,
754
+ )
755
+ self.next(self.test)
756
+ ```
757
+
758
+
759
+ Parameters
760
+ ----------
761
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
762
+ Artifact name(s) referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set on `self`.
763
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
764
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
765
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
766
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
767
+
768
+ temp_dir_root : str, default: None
769
+ The root directory under which `current.model.loaded` will store loaded models
770
+ """
771
+ ...
772
+
773
+ @typing.overload
774
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
775
+ """
776
+ Specifies the number of times the task corresponding
777
+ to a step needs to be retried.
778
+
779
+ This decorator is useful for handling transient errors, such as networking issues.
780
+ If your task contains operations that can't be retried safely, e.g. database updates,
781
+ it is advisable to annotate it with `@retry(times=0)`.
782
+
783
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
784
+ decorator will execute a no-op task after all retries have been exhausted,
785
+ ensuring that the flow execution can continue.
786
+
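+ > Example
+ A minimal sketch of retrying a flaky step (the step body and `fetch_rows` helper are illustrative):
+ ```python
+ @retry(times=4, minutes_between_retries=1)
+ @step
+ def extract(self):
+     # transient failures (e.g. network hiccups) are retried up to 4 times
+     self.rows = fetch_rows()  # hypothetical helper
+     self.next(self.end)
+ ```
+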
787
+
788
+ Parameters
789
+ ----------
790
+ times : int, default 3
791
+ Number of times to retry this task.
792
+ minutes_between_retries : int, default 2
793
+ Number of minutes between retries.
794
+ """
795
+ ...
796
+
797
+ @typing.overload
798
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
799
+ ...
800
+
801
+ @typing.overload
802
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
803
+ ...
804
+
805
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
806
+ """
807
+ Specifies the number of times the task corresponding
808
+ to a step needs to be retried.
809
+
810
+ This decorator is useful for handling transient errors, such as networking issues.
811
+ If your task contains operations that can't be retried safely, e.g. database updates,
812
+ it is advisable to annotate it with `@retry(times=0)`.
813
+
814
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
815
+ decorator will execute a no-op task after all retries have been exhausted,
816
+ ensuring that the flow execution can continue.
817
+
818
+
819
+ Parameters
820
+ ----------
821
+ times : int, default 3
822
+ Number of times to retry this task.
823
+ minutes_between_retries : int, default 2
824
+ Number of minutes between retries.
825
+ """
826
+ ...
827
+
828
+ @typing.overload
829
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
830
+ """
831
+ Creates a human-readable report, a Metaflow Card, after this step completes.
832
+
833
+ Note that you may add multiple `@card` decorators in a step with different parameters.
834
+
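+ > Example
+ A minimal sketch (the card id and markdown content are illustrative):
+ ```python
+ @card(type='default', id='summary')
+ @step
+ def report(self):
+     from metaflow import current
+     from metaflow.cards import Markdown
+     # appended components are rendered in the card once the step completes
+     current.card['summary'].append(Markdown('# Training summary'))
+     self.next(self.end)
+ ```
+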
835
+
836
+ Parameters
837
+ ----------
838
+ type : str, default 'default'
839
+ Card type.
840
+ id : str, optional, default None
841
+ If multiple cards are present, use this id to identify this card.
842
+ options : Dict[str, Any], default {}
843
+ Options passed to the card. The contents depend on the card type.
844
+ timeout : int, default 45
845
+ Interrupt reporting if it takes more than this many seconds.
846
+ """
847
+ ...
848
+
849
+ @typing.overload
850
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
851
+ ...
852
+
853
+ @typing.overload
854
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
855
+ ...
856
+
857
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
858
+ """
859
+ Creates a human-readable report, a Metaflow Card, after this step completes.
860
+
861
+ Note that you may add multiple `@card` decorators in a step with different parameters.
862
+
863
+
864
+ Parameters
865
+ ----------
866
+ type : str, default 'default'
867
+ Card type.
868
+ id : str, optional, default None
869
+ If multiple cards are present, use this id to identify this card.
870
+ options : Dict[str, Any], default {}
871
+ Options passed to the card. The contents depend on the card type.
872
+ timeout : int, default 45
873
+ Interrupt reporting if it takes more than this many seconds.
874
+ """
875
+ ...
876
+
877
+ @typing.overload
878
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
879
+ """
880
+ Decorator prototype for all step decorators. This function gets specialized
881
+ and imported for all decorator types by _import_plugin_decorators().
882
+ """
883
+ ...
884
+
885
+ @typing.overload
886
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
887
+ ...
888
+
889
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
890
+ """
891
+ Decorator prototype for all step decorators. This function gets specialized
892
+ and imported for all decorator types by _import_plugin_decorators().
893
+ """
894
+ ...
895
+
896
+ @typing.overload
897
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
898
+ """
899
+ Enables checkpointing for a step.
900
+
901
+ > Examples
902
+
903
+ - Saving Checkpoints
904
+
905
+ ```python
906
+ @checkpoint
907
+ @step
908
+ def train(self):
909
+ model = create_model(self.parameters, checkpoint_path = None)
910
+ for i in range(self.epochs):
911
+ # some training logic
912
+ loss = model.train(self.dataset)
913
+ if i % 10 == 0:
914
+ model.save(
716
915
  current.checkpoint.directory,
717
916
  )
718
917
  # saves the contents of the `current.checkpoint.directory` as a checkpoint
@@ -842,513 +1041,337 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
842
1041
  ...
843
1042
 
844
1043
  @typing.overload
845
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
846
- """
847
- A simple decorator that demonstrates using CardDecoratorInjector
848
- to inject a card and render simple markdown content.
849
- """
850
- ...
851
-
852
- @typing.overload
853
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
854
- ...
855
-
856
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
857
- """
858
- A simple decorator that demonstrates using CardDecoratorInjector
859
- to inject a card and render simple markdown content.
860
- """
861
- ...
862
-
863
- def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1044
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
864
1045
  """
865
- `@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
866
- It exists to make it easier for users to know that this decorator should only be used with
867
- a Neo Cloud like CoreWeave. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
868
-
869
-
870
- Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
871
- for S3 read and write requests.
872
-
873
- This decorator requires an integration in the Outerbounds platform that
874
- points to an external bucket. It affects S3 operations performed via
875
- Metaflow's `get_aws_client` and `S3` within a `@step`.
876
-
877
- Read operations
878
- ---------------
879
- All read operations pass through the proxy. If an object does not already
880
- exist in the external bucket, it is cached there. For example, if code reads
881
- from buckets `FOO` and `BAR` using the `S3` interface, objects from both
882
- buckets are cached in the external bucket.
883
-
884
- During task execution, all S3‑related read requests are routed through the
885
- proxy:
886
- - If the object is present in the external object store, the proxy
887
- streams it directly from there without accessing the requested origin
888
- bucket.
889
- - If the object is not present in the external storage, the proxy
890
- fetches it from the requested bucket, caches it in the external
891
- storage, and streams the response from the origin bucket.
892
-
893
- Warning
894
- -------
895
- All READ operations (e.g., GetObject, HeadObject) pass through the external
896
- bucket regardless of the bucket specified in user code. Even
897
- `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
898
- external bucket cache.
899
-
900
- Write operations
901
- ----------------
902
- Write behavior is controlled by the `write_mode` parameter, which determines
903
- whether writes also persist objects in the cache.
904
-
905
- `write_mode` values:
906
- - `origin-and-cache`: objects are written both to the cache and to their
907
- intended origin bucket.
908
- - `origin`: objects are written only to their intended origin bucket.
909
-
910
-
911
- Parameters
912
- ----------
913
- integration_name : str, optional
914
- [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
915
- that holds the configuration for the external, S3‑compatible object
916
- storage bucket. If not specified, the only available S3 proxy
917
- integration in the namespace is used (fails if multiple exist).
918
- write_mode : str, optional
919
- Controls whether writes also go to the external bucket.
920
- - `origin` (default)
921
- - `origin-and-cache`
922
- debug : bool, optional
923
- Enables debug logging for proxy operations.
924
- """
925
- ...
926
-
927
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
928
- """
929
- Specifies that this step should execute on DGX cloud.
930
-
931
-
932
- Parameters
933
- ----------
934
- gpu : int
935
- Number of GPUs to use.
936
- gpu_type : str
937
- Type of Nvidia GPU to use.
938
- """
939
- ...
940
-
941
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
942
- """
943
- This decorator is used to run vllm APIs as Metaflow task sidecars.
944
-
945
- User code call
946
- --------------
947
- @vllm(
948
- model="...",
949
- ...
950
- )
1046
+ Specifies a timeout for your step.
951
1047
 
952
- Valid backend options
953
- ---------------------
954
- - 'local': Run as a separate process on the local task machine.
1048
+ This decorator is useful if this step may hang indefinitely.
955
1049
 
956
- Valid model options
957
- -------------------
958
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1050
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1051
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1052
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
959
1053
 
960
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
961
- If you need multiple models, you must create multiple @vllm decorators.
1054
+ Note that all the values specified in parameters are added together so if you specify
1055
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
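+
+ > Example
+ A minimal sketch (the time budget and helper are illustrative):
+ ```python
+ @timeout(hours=1, minutes=30)
+ @step
+ def long_running(self):
+     # the task is interrupted if it runs longer than 1 hour 30 minutes
+     self.result = crunch_numbers(self.data)  # hypothetical helper
+     self.next(self.end)
+ ```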
962
1056
 
963
1057
 
964
1058
  Parameters
965
1059
  ----------
966
- model: str
967
- HuggingFace model identifier to be served by vLLM.
968
- backend: str
969
- Determines where and how to run the vLLM process.
970
- openai_api_server: bool
971
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
972
- Default is False (uses native engine).
973
- Set to True for backward compatibility with existing code.
974
- debug: bool
975
- Whether to turn on verbose debugging logs.
976
- card_refresh_interval: int
977
- Interval in seconds for refreshing the vLLM status card.
978
- Only used when openai_api_server=True.
979
- max_retries: int
980
- Maximum number of retries checking for vLLM server startup.
981
- Only used when openai_api_server=True.
982
- retry_alert_frequency: int
983
- Frequency of alert logs for vLLM server startup retries.
984
- Only used when openai_api_server=True.
985
- engine_args : dict
986
- Additional keyword arguments to pass to the vLLM engine.
987
- For example, `tensor_parallel_size=2`.
1060
+ seconds : int, default 0
1061
+ Number of seconds to wait prior to timing out.
1062
+ minutes : int, default 0
1063
+ Number of minutes to wait prior to timing out.
1064
+ hours : int, default 0
1065
+ Number of hours to wait prior to timing out.
988
1066
  """
989
1067
  ...
990
1068
 
991
1069
  @typing.overload
992
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
993
- """
994
- Decorator prototype for all step decorators. This function gets specialized
995
- and imported for all decorators types by _import_plugin_decorators().
996
- """
1070
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
997
1071
  ...
998
1072
 
999
1073
  @typing.overload
1000
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1001
- ...
1002
-
1003
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1004
- """
1005
- Decorator prototype for all step decorators. This function gets specialized
1006
- and imported for all decorators types by _import_plugin_decorators().
1007
- """
1074
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1008
1075
  ...
1009
1076
 
1010
- @typing.overload
1011
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1077
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1012
1078
  """
1013
- Enables loading / saving of models within a step.
1079
+ Specifies a timeout for your step.
1014
1080
 
1015
- > Examples
1016
- - Saving Models
1017
- ```python
1018
- @model
1019
- @step
1020
- def train(self):
1021
- # current.model.save returns a dictionary reference to the model saved
1022
- self.my_model = current.model.save(
1023
- path_to_my_model,
1024
- label="my_model",
1025
- metadata={
1026
- "epochs": 10,
1027
- "batch-size": 32,
1028
- "learning-rate": 0.001,
1029
- }
1030
- )
1031
- self.next(self.test)
1081
+ This decorator is useful if this step may hang indefinitely.
1032
1082
 
1033
- @model(load="my_model")
1034
- @step
1035
- def test(self):
1036
- # `current.model.loaded` returns a dictionary of the loaded models
1037
- # where the key is the name of the artifact and the value is the path to the model
1038
- print(os.listdir(current.model.loaded["my_model"]))
1039
- self.next(self.end)
1040
- ```
1083
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1084
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1085
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1041
1086
 
1042
- - Loading models
1043
- ```python
1044
- @step
1045
- def train(self):
1046
- # current.model.load returns the path to the model loaded
1047
- checkpoint_path = current.model.load(
1048
- self.checkpoint_key,
1049
- )
1050
- model_path = current.model.load(
1051
- self.model,
1052
- )
1053
- self.next(self.test)
1054
- ```
1087
+ Note that all the values specified in parameters are added together so if you specify
1088
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1055
1089
 
1056
1090
 
1057
1091
  Parameters
1058
1092
  ----------
1059
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1060
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1061
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1062
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1063
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1064
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1065
-
1066
- temp_dir_root : str, default: None
1067
- The root directory under which `current.model.loaded` will store loaded models
1093
+ seconds : int, default 0
1094
+ Number of seconds to wait prior to timing out.
1095
+ minutes : int, default 0
1096
+ Number of minutes to wait prior to timing out.
1097
+ hours : int, default 0
1098
+ Number of hours to wait prior to timing out.
1068
1099
  """
1069
1100
  ...
1070
1101
 
1071
1102
  @typing.overload
1072
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1073
- ...
1074
-
1075
- @typing.overload
1076
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1077
- ...
1078
-
1079
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1103
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1080
1104
  """
1081
- Enables loading / saving of models within a step.
1082
-
1083
- > Examples
1084
- - Saving Models
1085
- ```python
1086
- @model
1087
- @step
1088
- def train(self):
1089
- # current.model.save returns a dictionary reference to the model saved
1090
- self.my_model = current.model.save(
1091
- path_to_my_model,
1092
- label="my_model",
1093
- metadata={
1094
- "epochs": 10,
1095
- "batch-size": 32,
1096
- "learning-rate": 0.001,
1097
- }
1098
- )
1099
- self.next(self.test)
1100
-
1101
- @model(load="my_model")
1102
- @step
1103
- def test(self):
1104
- # `current.model.loaded` returns a dictionary of the loaded models
1105
- # where the key is the name of the artifact and the value is the path to the model
1106
- print(os.listdir(current.model.loaded["my_model"]))
1107
- self.next(self.end)
1108
- ```
1109
-
1110
- - Loading models
1111
- ```python
1112
- @step
1113
- def train(self):
1114
- # current.model.load returns the path to the model loaded
1115
- checkpoint_path = current.model.load(
1116
- self.checkpoint_key,
1117
- )
1118
- model_path = current.model.load(
1119
- self.model,
1120
- )
1121
- self.next(self.test)
1122
- ```
1123
-
1124
-
1125
- Parameters
1126
- ----------
1127
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1128
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1129
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1130
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1131
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1132
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1105
+ Specifies the Conda environment for the step.
1133
1106
 
1134
- temp_dir_root : str, default: None
1135
- The root directory under which `current.model.loaded` will store loaded models
1136
- """
1137
- ...
1138
-
1139
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1140
- """
1141
- Specifies that this step should execute on Kubernetes.
1107
+ Information in this decorator will augment any
1108
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1109
+ you can use `@conda_base` to set packages required by all
1110
+ steps and use `@conda` to specify step-specific overrides.
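+
+ > Example
+ A minimal sketch (package versions are illustrative):
+ ```python
+ @conda(packages={"pandas": "2.2.2"}, python="3.11.0")
+ @step
+ def transform(self):
+     import pandas as pd  # resolved from the step's Conda environment
+     self.df = pd.DataFrame({"x": [1, 2, 3]})
+     self.next(self.end)
+ ```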
1142
1111
 
1143
1112
 
1144
1113
  Parameters
1145
1114
  ----------
1146
- cpu : int, default 1
1147
- Number of CPUs required for this step. If `@resources` is
1148
- also present, the maximum value from all decorators is used.
1149
- memory : int, default 4096
1150
- Memory size (in MB) required for this step. If
1151
- `@resources` is also present, the maximum value from all decorators is
1152
- used.
1153
- disk : int, default 10240
1154
- Disk size (in MB) required for this step. If
1155
- `@resources` is also present, the maximum value from all decorators is
1156
- used.
1157
- image : str, optional, default None
1158
- Docker image to use when launching on Kubernetes. If not specified, and
1159
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1160
- not, a default Docker image mapping to the current version of Python is used.
1161
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1162
- If given, the imagePullPolicy to be applied to the Docker image of the step.
1163
- image_pull_secrets: List[str], default []
1164
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
1165
- Kubernetes image pull secrets to use when pulling container images
1166
- in Kubernetes.
1167
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1168
- Kubernetes service account to use when launching pod in Kubernetes.
1169
- secrets : List[str], optional, default None
1170
- Kubernetes secrets to use when launching pod in Kubernetes. These
1171
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1172
- in Metaflow configuration.
1173
- node_selector: Union[Dict[str,str], str], optional, default None
1174
- Kubernetes node selector(s) to apply to the pod running the task.
1175
- Can be passed in as a comma separated string of values e.g.
1176
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
1177
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
1178
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1179
- Kubernetes namespace to use when launching pod in Kubernetes.
1180
- gpu : int, optional, default None
1181
- Number of GPUs required for this step. A value of zero implies that
1182
- the scheduled node should not have GPUs.
1183
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1184
- The vendor of the GPUs to be used for this step.
1185
- tolerations : List[Dict[str,str]], default []
1186
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1187
- Kubernetes tolerations to use when launching pod in Kubernetes.
1188
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
1189
- Kubernetes labels to use when launching pod in Kubernetes.
1190
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
1191
- Kubernetes annotations to use when launching pod in Kubernetes.
1192
- use_tmpfs : bool, default False
1193
- This enables an explicit tmpfs mount for this step.
1194
- tmpfs_tempdir : bool, default True
1195
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1196
- tmpfs_size : int, optional, default: None
1197
- The value for the size (in MiB) of the tmpfs mount for this step.
1198
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1199
- memory allocated for this step.
1200
- tmpfs_path : str, optional, default /metaflow_temp
1201
- Path to tmpfs mount for this step.
1202
- persistent_volume_claims : Dict[str, str], optional, default None
1203
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1204
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1205
- shared_memory: int, optional
1206
- Shared memory size (in MiB) required for this step
1207
- port: int, optional
1208
- Port number to specify in the Kubernetes job object
1209
- compute_pool : str, optional, default None
1210
- Compute pool to be used for for this step.
1211
- If not specified, any accessible compute pool within the perimeter is used.
1212
- hostname_resolution_timeout: int, default 10 * 60
1213
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
1214
- Only applicable when @parallel is used.
1215
- qos: str, default: Burstable
1216
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1217
-
1218
- security_context: Dict[str, Any], optional, default None
1219
- Container security context. Applies to the task container. Allows the following keys:
1220
- - privileged: bool, optional, default None
1221
- - allow_privilege_escalation: bool, optional, default None
1222
- - run_as_user: int, optional, default None
1223
- - run_as_group: int, optional, default None
1224
- - run_as_non_root: bool, optional, default None
1115
+ packages : Dict[str, str], default {}
1116
+ Packages to use for this step. The key is the name of the package
1117
+ and the value is the version to use.
1118
+ libraries : Dict[str, str], default {}
1119
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1120
+ python : str, optional, default None
1121
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1122
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1123
+ disabled : bool, default False
1124
+ If set to True, disables @conda.
1225
1125
  """
1226
1126
  ...
1227
1127
 
1228
1128
  @typing.overload
1229
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1129
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1130
+ ...
1131
+
1132
+ @typing.overload
1133
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1134
+ ...
1135
+
1136
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1230
1137
  """
1231
- Specifies a timeout for your step.
1138
+ Specifies the Conda environment for the step.
1232
1139
 
1233
- This decorator is useful if this step may hang indefinitely.
1140
+ Information in this decorator will augment any
1141
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1142
+ you can use `@conda_base` to set packages required by all
1143
+ steps and use `@conda` to specify step-specific overrides.
1234
1144
 
1235
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1236
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1237
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1238
1145
 
1239
- Note that all the values specified in parameters are added together so if you specify
1240
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1146
+ Parameters
1147
+ ----------
1148
+ packages : Dict[str, str], default {}
1149
+ Packages to use for this step. The key is the name of the package
1150
+ and the value is the version to use.
1151
+ libraries : Dict[str, str], default {}
1152
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1153
+ python : str, optional, default None
1154
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1155
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1156
+ disabled : bool, default False
1157
+ If set to True, disables @conda.
1158
+ """
1159
+ ...
1160
+
1161
+ @typing.overload
1162
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1163
+ """
1164
+ Specifies the resources needed when executing this step.
1165
+
1166
+ Use `@resources` to specify the resource requirements
1167
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1168
+
1169
+ You can choose the compute layer on the command line by executing e.g.
1170
+ ```
1171
+ python myflow.py run --with batch
1172
+ ```
1173
+ or
1174
+ ```
1175
+ python myflow.py run --with kubernetes
1176
+ ```
1177
+ which executes the flow on the desired system using the
1178
+ requirements specified in `@resources`.
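+
+ > Example
+ A minimal sketch (resource values and the helper are illustrative):
+ ```python
+ @resources(cpu=2, memory=8192, gpu=1)
+ @step
+ def train(self):
+     # the requirements apply on whichever compute layer is chosen with --with
+     self.model = train_model(self.data)  # hypothetical helper
+     self.next(self.end)
+ ```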
1241
1179
 
1242
1180
 
1243
1181
  Parameters
1244
1182
  ----------
1245
- seconds : int, default 0
1246
- Number of seconds to wait prior to timing out.
1247
- minutes : int, default 0
1248
- Number of minutes to wait prior to timing out.
1249
- hours : int, default 0
1250
- Number of hours to wait prior to timing out.
1183
+ cpu : int, default 1
1184
+ Number of CPUs required for this step.
1185
+ gpu : int, optional, default None
1186
+ Number of GPUs required for this step.
1187
+ disk : int, optional, default None
1188
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1189
+ memory : int, default 4096
1190
+ Memory size (in MB) required for this step.
1191
+ shared_memory : int, optional, default None
1192
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1193
+ This parameter maps to the `--shm-size` option in Docker.
1251
1194
  """
1252
1195
  ...
1253
1196
 
1254
1197
  @typing.overload
1255
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1198
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1256
1199
  ...
1257
1200
 
1258
1201
  @typing.overload
1259
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1202
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1260
1203
  ...
1261
1204
 
1262
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1205
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1263
1206
  """
1264
- Specifies a timeout for your step.
1265
-
1266
- This decorator is useful if this step may hang indefinitely.
1207
+ Specifies the resources needed when executing this step.
1267
1208
 
1268
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1269
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1270
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1209
+ Use `@resources` to specify the resource requirements
1210
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1271
1211
 
1272
- Note that all the values specified in parameters are added together so if you specify
1273
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1212
+ You can choose the compute layer on the command line by executing e.g.
1213
+ ```
1214
+ python myflow.py run --with batch
1215
+ ```
1216
+ or
1217
+ ```
1218
+ python myflow.py run --with kubernetes
1219
+ ```
1220
+ which executes the flow on the desired system using the
1221
+ requirements specified in `@resources`.
1274
1222
 
1275
1223
 
1276
1224
  Parameters
1277
1225
  ----------
1278
- seconds : int, default 0
1279
- Number of seconds to wait prior to timing out.
1280
- minutes : int, default 0
1281
- Number of minutes to wait prior to timing out.
1282
- hours : int, default 0
1283
- Number of hours to wait prior to timing out.
1226
+ cpu : int, default 1
1227
+ Number of CPUs required for this step.
1228
+ gpu : int, optional, default None
1229
+ Number of GPUs required for this step.
1230
+ disk : int, optional, default None
1231
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1232
+ memory : int, default 4096
1233
+ Memory size (in MB) required for this step.
1234
+ shared_memory : int, optional, default None
1235
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1236
+ This parameter maps to the `--shm-size` option in Docker.
1284
1237
  """
1285
1238
  ...
1286
1239
 
1287
1240
  @typing.overload
1288
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1241
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1289
1242
  """
1290
- Specifies that the step will success under all circumstances.
1291
-
1292
- The decorator will create an optional artifact, specified by `var`, which
1293
- contains the exception raised. You can use it to detect the presence
1294
- of errors, indicating that all happy-path artifacts produced by the step
1295
- are missing.
1296
-
1297
-
1298
- Parameters
1299
- ----------
1300
- var : str, optional, default None
1301
- Name of the artifact in which to store the caught exception.
1302
- If not specified, the exception is not stored.
1303
- print_exception : bool, default True
1304
- Determines whether or not the exception is printed to
1305
- stdout when caught.
1243
+ A simple decorator that demonstrates using CardDecoratorInjector
1244
+ to inject a card and render simple markdown content.
1306
1245
  """
1307
1246
  ...
1308
1247
 
1309
1248
  @typing.overload
1310
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1249
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1311
1250
  ...
1312
1251
 
1313
- @typing.overload
1314
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1252
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1253
+ """
1254
+ A simple decorator that demonstrates using CardDecoratorInjector
1255
+ to inject a card and render simple markdown content.
1256
+ """
1315
1257
  ...
1316
1258
 
1317
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1259
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1318
1260
  """
1319
- Specifies that the step will success under all circumstances.
1320
-
1321
- The decorator will create an optional artifact, specified by `var`, which
1322
- contains the exception raised. You can use it to detect the presence
1323
- of errors, indicating that all happy-path artifacts produced by the step
1324
- are missing.
1261
+ Specifies that this step should execute on DGX cloud.
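+
+ > Example
+ A minimal sketch (GPU count, GPU type, and queue timeout are illustrative):
+ ```python
+ @nvidia(gpu=1, gpu_type='H100', queue_timeout=3600)
+ @step
+ def train(self):
+     # executes on DGX Cloud with the requested GPU
+     self.model = train_model(self.data)  # hypothetical helper
+     self.next(self.end)
+ ```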
1325
1262
 
1326
1263
 
1327
1264
  Parameters
1328
1265
  ----------
1329
- var : str, optional, default None
1330
- Name of the artifact in which to store the caught exception.
1331
- If not specified, the exception is not stored.
1332
- print_exception : bool, default True
1333
- Determines whether or not the exception is printed to
1334
- stdout when caught.
1266
+ gpu : int
1267
+ Number of GPUs to use.
1268
+ gpu_type : str
1269
+ Type of Nvidia GPU to use.
1270
+ queue_timeout : int
1271
+ Time to keep the job in NVCF's queue.
1335
1272
  """
1336
1273
  ...
1337
1274
 
1338
1275
  @typing.overload
1339
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1276
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1340
1277
  """
1341
- Internal decorator to support Fast bakery
1278
+ Specifies secrets to be retrieved and injected as environment variables prior to
1279
+ the execution of a step.
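+
+ > Example
+ A minimal sketch (the secret source and environment variable names are illustrative):
+ ```python
+ @secrets(sources=['prod-db-credentials'])
+ @step
+ def query(self):
+     import os
+     # keys from the secret source are injected as environment variables
+     password = os.environ['DB_PASSWORD']
+     self.next(self.end)
+ ```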
1280
+
1281
+
1282
+ Parameters
1283
+ ----------
1284
+ sources : List[Union[str, Dict[str, Any]]], default: []
1285
+ List of secret specs, defining how the secrets are to be retrieved
1286
+ role : str, optional, default: None
1287
+ Role to use for fetching secrets
1342
1288
  """
1343
1289
  ...
1344
1290
 
1345
1291
  @typing.overload
1346
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1292
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1347
1293
  ...
1348
1294
 
1349
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1295
+ @typing.overload
1296
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1297
+ ...
1298
+
1299
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1350
1300
  """
1351
- Internal decorator to support Fast bakery
1301
+ Specifies secrets to be retrieved and injected as environment variables prior to
1302
+ the execution of a step.
1303
+
1304
+
1305
+ Parameters
1306
+ ----------
1307
+ sources : List[Union[str, Dict[str, Any]]], default: []
1308
+ List of secret specs, defining how the secrets are to be retrieved
1309
+ role : str, optional, default: None
1310
+ Role to use for fetching secrets
1311
+ """
1312
+ ...
1313
+
1314
+ def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1315
+ """
1316
+ `@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1317
+ It exists to make it easier for users to know that this decorator should only be used with
1318
+ a Neo Cloud like CoreWeave. The underlying mechanics of the decorator are the same as `@s3_proxy`:
1319
+
1320
+
1321
+ Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
1322
+ for S3 read and write requests.
1323
+
1324
+ This decorator requires an integration in the Outerbounds platform that
1325
+ points to an external bucket. It affects S3 operations performed via
1326
+ Metaflow's `get_aws_client` and `S3` within a `@step`.
1327
+
1328
+ Read operations
1329
+ ---------------
1330
+ All read operations pass through the proxy. If an object does not already
1331
+ exist in the external bucket, it is cached there. For example, if code reads
1332
+ from buckets `FOO` and `BAR` using the `S3` interface, objects from both
1333
+ buckets are cached in the external bucket.
1334
+
1335
+ During task execution, all S3‑related read requests are routed through the
1336
+ proxy:
1337
+ - If the object is present in the external object store, the proxy
1338
+ streams it directly from there without accessing the requested origin
1339
+ bucket.
1340
+ - If the object is not present in the external storage, the proxy
1341
+ fetches it from the requested bucket, caches it in the external
1342
+ storage, and streams the response from the origin bucket.
1343
+
1344
+ Warning
1345
+ -------
1346
+ All READ operations (e.g., GetObject, HeadObject) pass through the external
1347
+ bucket regardless of the bucket specified in user code. Even
1348
+ `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
1349
+ external bucket cache.
1350
+
1351
+ Write operations
1352
+ ----------------
1353
+ Write behavior is controlled by the `write_mode` parameter, which determines
1354
+ whether writes also persist objects in the cache.
1355
+
1356
+ `write_mode` values:
1357
+ - `origin-and-cache`: objects are written both to the cache and to their
1358
+ intended origin bucket.
1359
+ - `origin`: objects are written only to their intended origin bucket.
1360
+
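+ > Example
+ A minimal sketch (the integration name and bucket are illustrative):
+ ```python
+ @coreweave_s3_proxy(integration_name='coreweave-cache', write_mode='origin-and-cache')
+ @step
+ def load_data(self):
+     from metaflow import S3
+     # reads are served from (and cached in) the external bucket via the proxy
+     with S3(s3root='s3://my-origin-bucket/data/') as s3:
+         self.objects = [obj.key for obj in s3.get_all()]
+     self.next(self.end)
+ ```
+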
1361
+
1362
+ Parameters
1363
+ ----------
1364
+ integration_name : str, optional
1365
+ [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
1366
+ that holds the configuration for the external, S3‑compatible object
1367
+ storage bucket. If not specified, the only available S3 proxy
1368
+ integration in the namespace is used (fails if multiple exist).
1369
+ write_mode : str, optional
1370
+ Controls whether writes also go to the external bucket.
1371
+ - `origin` (default)
1372
+ - `origin-and-cache`
1373
+ debug : bool, optional
1374
+ Enables debug logging for proxy operations.
1352
1375
  """
1353
1376
  ...
1354
1377
 
@@ -1437,76 +1460,53 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
1437
1460
  ...
1438
1461
 
1439
1462
  @typing.overload
1440
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1463
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1441
1464
  """
1442
- Specifies the number of times the task corresponding
1443
- to a step needs to be retried.
1444
-
1445
- This decorator is useful for handling transient errors, such as networking issues.
1446
- If your task contains operations that can't be retried safely, e.g. database updates,
1447
- it is advisable to annotate it with `@retry(times=0)`.
1465
+ Specifies that the step will succeed under all circumstances.
1448
1466
 
1449
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1450
- decorator will execute a no-op task after all retries have been exhausted,
1451
- ensuring that the flow execution can continue.
1467
+ The decorator will create an optional artifact, specified by `var`, which
1468
+ contains the exception raised. You can use it to detect the presence
1469
+ of errors, indicating that all happy-path artifacts produced by the step
1470
+ are missing.
1452
1471
 
1453
1472
 
1454
1473
  Parameters
1455
1474
  ----------
1456
- times : int, default 3
1457
- Number of times to retry this task.
1458
- minutes_between_retries : int, default 2
1459
- Number of minutes between retries.
1475
+ var : str, optional, default None
1476
+ Name of the artifact in which to store the caught exception.
1477
+ If not specified, the exception is not stored.
1478
+ print_exception : bool, default True
1479
+ Determines whether or not the exception is printed to
1480
+ stdout when caught.
1460
1481
  """
1461
1482
  ...
1462
1483
 
1463
1484
  @typing.overload
1464
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1485
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1465
1486
  ...
1466
1487
 
1467
1488
  @typing.overload
1468
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1489
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1469
1490
  ...
1470
1491
 
1471
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1492
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1472
1493
  """
1473
- Specifies the number of times the task corresponding
1474
- to a step needs to be retried.
1475
-
1476
- This decorator is useful for handling transient errors, such as networking issues.
1477
- If your task contains operations that can't be retried safely, e.g. database updates,
1478
- it is advisable to annotate it with `@retry(times=0)`.
1494
+ Specifies that the step will success under all circumstances.
1479
1495
 
1480
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1481
- decorator will execute a no-op task after all retries have been exhausted,
1482
- ensuring that the flow execution can continue.
1496
+ The decorator will create an optional artifact, specified by `var`, which
1497
+ contains the exception raised. You can use it to detect the presence
1498
+ of errors, indicating that all happy-path artifacts produced by the step
1499
+ are missing.
1483
1500
 
1484
1501
 
1485
1502
  Parameters
1486
1503
  ----------
1487
- times : int, default 3
1488
- Number of times to retry this task.
1489
- minutes_between_retries : int, default 2
1490
- Number of minutes between retries.
1491
- """
1492
- ...
1493
-
1494
- @typing.overload
1495
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1496
- """
1497
- Decorator prototype for all step decorators. This function gets specialized
1498
- and imported for all decorators types by _import_plugin_decorators().
1499
- """
1500
- ...
1501
-
1502
- @typing.overload
1503
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1504
- ...
1505
-
1506
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1507
- """
1508
- Decorator prototype for all step decorators. This function gets specialized
1509
- and imported for all decorators types by _import_plugin_decorators().
1504
+ var : str, optional, default None
1505
+ Name of the artifact in which to store the caught exception.
1506
+ If not specified, the exception is not stored.
1507
+ print_exception : bool, default True
1508
+ Determines whether or not the exception is printed to
1509
+ stdout when caught.
1510
1510
  """
1511
1511
  ...
1512
1512
 
@@ -1624,104 +1624,165 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1624
1624
  """
1625
1625
  ...
1626
1626
 
1627
- @typing.overload
1628
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1627
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1629
1628
  """
1630
- Specifies the flow(s) that this flow depends on.
1631
-
1632
- ```
1633
- @trigger_on_finish(flow='FooFlow')
1634
- ```
1635
- or
1636
- ```
1637
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1638
- ```
1639
- This decorator respects the @project decorator and triggers the flow
1640
- when upstream runs within the same namespace complete successfully
1641
-
1642
- Additionally, you can specify project aware upstream flow dependencies
1643
- by specifying the fully qualified project_flow_name.
1644
- ```
1645
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1646
- ```
1647
- or
1648
- ```
1649
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1650
- ```
1629
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1630
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator ensures that the `start` step starts only after all sensors finish.
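+
+ > Example
+ A minimal sketch (the DAG id, task id, and sensor name are illustrative; the remaining
+ sensor parameters are assumed to fall back to their documented defaults):
+ ```python
+ @airflow_external_task_sensor(
+     name='wait_for_upstream_etl',
+     external_dag_id='upstream_etl',
+     external_task_ids=['publish_table'],
+ )
+ class MyFlow(FlowSpec):
+     @step
+     def start(self):
+         # start runs only after the upstream Airflow task has succeeded
+         self.next(self.end)
+ ```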
1651
1631
 
1652
- You can also specify just the project or project branch (other values will be
1653
- inferred from the current project or project branch):
1654
- ```
1655
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1656
- ```
1657
1632
 
1658
- Note that `branch` is typically one of:
1659
- - `prod`
1660
- - `user.bob`
1661
- - `test.my_experiment`
1662
- - `prod.staging`
1633
+ Parameters
1634
+ ----------
1635
+ timeout : int
1636
+ Time, in seconds before the task times out and fails. (Default: 3600)
1637
+ poke_interval : int
1638
+ Time in seconds that the job should wait in between each try. (Default: 60)
1639
+ mode : str
1640
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1641
+ exponential_backoff : bool
1642
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1643
+ pool : str
1644
+ the slot pool this task should run in,
1645
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1646
+ soft_fail : bool
1647
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1648
+ name : str
1649
+ Name of the sensor on Airflow
1650
+ description : str
1651
+ Description of sensor in the Airflow UI
1652
+ external_dag_id : str
1653
+ The dag_id that contains the task you want to wait for.
1654
+ external_task_ids : List[str]
1655
+ The list of task_ids that you want to wait for.
1656
+ If None (default value) the sensor waits for the DAG. (Default: None)
1657
+ allowed_states : List[str]
1658
+ Iterable of allowed states, (Default: ['success'])
1659
+ failed_states : List[str]
1660
+ Iterable of failed or dis-allowed states. (Default: None)
1661
+ execution_delta : datetime.timedelta
1662
+ time difference with the previous execution to look at,
1663
+ the default is the same logical date as the current task or DAG. (Default: None)
1664
+ check_existence: bool
1665
+ Set to True to check if the external task exists or check if
1666
+ the DAG to wait for exists. (Default: True)
1667
+ """
1668
+ ...
1669
+
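A sketch of how the `@airflow_external_task_sensor` flow decorator added above is typically applied. It assumes the decorator is importable from the top-level `metaflow` package (as this stub layout suggests); the DAG id and other values are illustrative, and omitted parameters fall back to the defaults listed in the docstring:

```python
from datetime import timedelta

from metaflow import FlowSpec, step, airflow_external_task_sensor


@airflow_external_task_sensor(
    name="wait_for_nightly_etl",         # sensor task name in the Airflow UI (illustrative)
    description="Block start until the nightly ETL DAG succeeds",
    external_dag_id="nightly_etl",       # illustrative upstream DAG id
    external_task_ids=None,              # None = wait for the whole DAG
    allowed_states=["success"],
    failed_states=["failed"],
    execution_delta=timedelta(hours=1),  # upstream run is scheduled one hour earlier
    check_existence=True,
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        # Runs only after the external task sensor above has succeeded.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorGatedFlow()
```

As the docstring notes, the sensor only takes effect once the flow is compiled for Airflow with `airflow create`; it is then inserted upstream of the `start` task in the generated DAG.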
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
 
  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the PyPI packages for all steps of the flow.
 
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
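A short sketch of the `@pypi_base` / `@pypi` combination described above, assuming the standard `metaflow` package; the package pins and Python version are illustrative only:

```python
from metaflow import FlowSpec, step, pypi, pypi_base


@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.9")  # shared by all steps (illustrative pins)
class PypiBaseDemoFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        print("pandas", pd.__version__)
        self.next(self.end)

    @pypi(packages={"scikit-learn": "1.5.0"})  # step-specific addition on top of the base packages
    @step
    def end(self):
        import sklearn
        print("scikit-learn", sklearn.__version__)


if __name__ == "__main__":
    PypiBaseDemoFlow()
```

These decorators are typically activated by running the flow with the `--environment=pypi` top-level option, which resolves the pinned packages into per-step environments.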
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
 
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+ wildcard_match : bool
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ a reference to the s3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ """
+ ...
+
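Similarly, a sketch of the `@airflow_s3_key_sensor` decorator added above. The bucket, key, and connection id are illustrative; because `bucket_key` is given as a full `s3://` URL, `bucket_name` is left at its default, and the sensor only takes effect once the flow is compiled with `airflow create`:

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor


@airflow_s3_key_sensor(
    name="wait_for_input_file",
    description="Block start until the input file lands in S3",
    bucket_key="s3://example-bucket/input/data.parquet",  # full s3:// URL (illustrative)
    wildcard_match=False,
    aws_conn_id="aws_default",  # Airflow connection id (illustrative)
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        # Runs only after the S3 key above exists.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3GatedFlow()
```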
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
 
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
  """
  ...
 
@@ -1776,84 +1837,6 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...
 
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
- """
- ...
-
  @typing.overload
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1999,86 +1982,103 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  ...
 
  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the flow(s) that this flow depends on.
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
 
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
 
 
  Parameters
  ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
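Finally, a sketch of the `@trigger_on_finish` decorator whose docstring closes this hunk. It assumes the standard `metaflow` package and an upstream flow named `FooFlow` (the name used in the docstring's own examples); the dependency only fires once both flows are deployed to a production orchestrator:

```python
from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow="FooFlow")  # illustrative upstream flow from the docstring examples
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # Triggered automatically when a FooFlow run in the same namespace succeeds.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```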