ob-metaflow-stubs 6.0.10.13__py2.py3-none-any.whl → 6.0.10.15__py2.py3-none-any.whl

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.

Potentially problematic release.


This version of ob-metaflow-stubs might be problematic.

Files changed (266)
  1. metaflow-stubs/__init__.pyi +1134 -1134
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +3 -3
  8. metaflow-stubs/client/filecache.pyi +1 -1
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +86 -86
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +2 -2
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/cards/__init__.pyi +1 -1
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/cards/hf_hub_card.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  64. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  65. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +4 -4
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  110. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  111. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  112. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  113. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  114. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  116. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  117. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +1 -1
  118. metaflow-stubs/multicore_utils.pyi +1 -1
  119. metaflow-stubs/ob_internal.pyi +1 -1
  120. metaflow-stubs/packaging_sys/__init__.pyi +4 -4
  121. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  122. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  123. metaflow-stubs/packaging_sys/tar_backend.pyi +4 -4
  124. metaflow-stubs/packaging_sys/utils.pyi +1 -1
  125. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  126. metaflow-stubs/parameters.pyi +3 -3
  127. metaflow-stubs/plugins/__init__.pyi +13 -13
  128. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  129. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  130. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  131. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  132. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  133. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  134. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  135. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  136. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  137. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  138. metaflow-stubs/plugins/argo/argo_workflows.pyi +1 -1
  139. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  140. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  141. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  142. metaflow-stubs/plugins/argo/exit_hooks.pyi +1 -1
  143. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  144. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  145. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  146. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  147. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  148. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  149. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  150. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  151. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  153. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  154. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  155. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  157. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  158. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +1 -1
  159. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  161. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  162. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  163. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  164. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  165. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  166. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  167. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  168. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  169. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  170. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  171. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  172. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  173. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  174. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  175. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  176. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +2 -2
  177. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  178. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  179. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  180. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  181. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  182. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  184. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  185. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  186. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  187. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  188. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  189. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  190. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  191. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  192. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  193. metaflow-stubs/plugins/exit_hook/__init__.pyi +1 -1
  194. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +1 -1
  195. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  196. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  197. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  198. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  200. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  201. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  202. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  203. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  204. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  205. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  206. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  207. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  208. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  209. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  210. metaflow-stubs/plugins/ollama/__init__.pyi +1 -1
  211. metaflow-stubs/plugins/optuna/__init__.pyi +1 -1
  212. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  213. metaflow-stubs/plugins/parsers.pyi +1 -1
  214. metaflow-stubs/plugins/perimeters.pyi +1 -1
  215. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  216. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  217. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  218. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  219. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  220. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  221. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  222. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  223. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  224. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  225. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  227. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  228. metaflow-stubs/plugins/secrets/secrets_func.pyi +1 -1
  229. metaflow-stubs/plugins/secrets/secrets_spec.pyi +1 -1
  230. metaflow-stubs/plugins/secrets/utils.pyi +1 -1
  231. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  232. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  233. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  234. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  235. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  236. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  237. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  238. metaflow-stubs/profilers/__init__.pyi +1 -1
  239. metaflow-stubs/pylint_wrapper.pyi +1 -1
  240. metaflow-stubs/runner/__init__.pyi +1 -1
  241. metaflow-stubs/runner/deployer.pyi +31 -31
  242. metaflow-stubs/runner/deployer_impl.pyi +1 -1
  243. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  244. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  245. metaflow-stubs/runner/nbrun.pyi +1 -1
  246. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  247. metaflow-stubs/runner/utils.pyi +2 -2
  248. metaflow-stubs/system/__init__.pyi +1 -1
  249. metaflow-stubs/system/system_logger.pyi +1 -1
  250. metaflow-stubs/system/system_monitor.pyi +1 -1
  251. metaflow-stubs/tagging_util.pyi +1 -1
  252. metaflow-stubs/tuple_util.pyi +1 -1
  253. metaflow-stubs/user_configs/__init__.pyi +1 -1
  254. metaflow-stubs/user_configs/config_options.pyi +2 -2
  255. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  256. metaflow-stubs/user_decorators/__init__.pyi +1 -1
  257. metaflow-stubs/user_decorators/common.pyi +1 -1
  258. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  259. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  260. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  261. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  262. {ob_metaflow_stubs-6.0.10.13.dist-info → ob_metaflow_stubs-6.0.10.15.dist-info}/METADATA +1 -1
  263. ob_metaflow_stubs-6.0.10.15.dist-info/RECORD +266 -0
  264. ob_metaflow_stubs-6.0.10.13.dist-info/RECORD +0 -266
  265. {ob_metaflow_stubs-6.0.10.13.dist-info → ob_metaflow_stubs-6.0.10.15.dist-info}/WHEEL +0 -0
  266. {ob_metaflow_stubs-6.0.10.13.dist-info → ob_metaflow_stubs-6.0.10.15.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.18.10.1+obcheckpoint(0.2.8);ob(v1) #
- # Generated on 2025-10-02T16:55:23.853861 #
+ # Generated on 2025-10-09T09:15:42.339267 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import datetime
  import typing
+ import datetime
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -39,18 +39,18 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
- from . import tuple_util as tuple_util
- from . import cards as cards
  from . import metaflow_git as metaflow_git
+ from . import cards as cards
+ from . import tuple_util as tuple_util
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.parsers import yaml_parser as yaml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.parsers import yaml_parser as yaml_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from . import client as client
  from .client.core import namespace as namespace
@@ -170,410 +170,301 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies the resources needed when executing this step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies the resources needed when executing this step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Enables loading / saving of models within a step.
-
- > Examples
- - Saving Models
- ```python
- @model
- @step
- def train(self):
- # current.model.save returns a dictionary reference to the model saved
- self.my_model = current.model.save(
- path_to_my_model,
- label="my_model",
- metadata={
- "epochs": 10,
- "batch-size": 32,
- "learning-rate": 0.001,
- }
- )
- self.next(self.test)
-
- @model(load="my_model")
- @step
- def test(self):
- # `current.model.loaded` returns a dictionary of the loaded models
- # where the key is the name of the artifact and the value is the path to the model
- print(os.listdir(current.model.loaded["my_model"]))
- self.next(self.end)
- ```
+ Specifies the Conda environment for the step.

- - Loading models
- ```python
- @step
- def train(self):
- # current.model.load returns the path to the model loaded
- checkpoint_path = current.model.load(
- self.checkpoint_key,
- )
- model_path = current.model.load(
- self.model,
- )
- self.next(self.test)
- ```
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
- temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Enables loading / saving of models within a step.
-
- > Examples
- - Saving Models
- ```python
- @model
- @step
- def train(self):
- # current.model.save returns a dictionary reference to the model saved
- self.my_model = current.model.save(
- path_to_my_model,
- label="my_model",
- metadata={
- "epochs": 10,
- "batch-size": 32,
- "learning-rate": 0.001,
- }
- )
- self.next(self.test)
-
- @model(load="my_model")
- @step
- def test(self):
- # `current.model.loaded` returns a dictionary of the loaded models
- # where the key is the name of the artifact and the value is the path to the model
- print(os.listdir(current.model.loaded["my_model"]))
- self.next(self.end)
- ```
+ Specifies the Conda environment for the step.

- - Loading models
- ```python
- @step
- def train(self):
- # current.model.load returns the path to the model loaded
- checkpoint_path = current.model.load(
- self.checkpoint_key,
- )
- model_path = current.model.load(
- self.model,
- )
- self.next(self.test)
- ```
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
- temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
- """
- ...
-
- @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.

+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )

- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ cache_update_policy: str
+ Cache update policy: "auto", "force", or "never".
+ force_cache_update: bool
+ Simple override for "force" cache update policy.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ circuit_breaker_config: dict
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+ timeout_config: dict
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
  """
  ...

- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
+ for S3 read and write requests.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator requires an integration in the Outerbounds platform that
+ points to an external bucket. It affects S3 operations performed via
+ Metaflow's `get_aws_client` and `S3` within a `@step`.

+ Read operations
+ ---------------
+ All read operations pass through the proxy. If an object does not already
+ exist in the external bucket, it is cached there. For example, if code reads
+ from buckets `FOO` and `BAR` using the `S3` interface, objects from both
+ buckets are cached in the external bucket.

- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
+ During task execution, all S3‑related read requests are routed through the
+ proxy:
+ - If the object is present in the external object store, the proxy
+ streams it directly from there without accessing the requested origin
+ bucket.
+ - If the object is not present in the external storage, the proxy
+ fetches it from the requested bucket, caches it in the external
+ storage, and streams the response from the origin bucket.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Warning
+ -------
+ All READ operations (e.g., GetObject, HeadObject) pass through the external
+ bucket regardless of the bucket specified in user code. Even
+ `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
+ external bucket cache.

+ Write operations
+ ----------------
+ Write behavior is controlled by the `write_mode` parameter, which determines
+ whether writes also persist objects in the cache.

- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ `write_mode` values:
+ - `origin-and-cache`: objects are written both to the cache and to their
+ intended origin bucket.
+ - `origin`: objects are written only to their intended origin bucket.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ integration_name : str, optional
+ [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
+ that holds the configuration for the external, S3‑compatible object
+ storage bucket. If not specified, the only available S3 proxy
+ integration in the namespace is used (fails if multiple exist).
+ write_mode : str, optional
+ Controls whether writes also go to the external bucket.
+ - `origin` (default)
+ - `origin-and-cache`
+ debug : bool, optional
+ Enables debug logging for proxy operations.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- This decorator is used to run vllm APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @vllm(
- model="...",
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
-
- Valid model options
- -------------------
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
- If you need multiple models, you must create multiple @vllm decorators.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- model: str
- HuggingFace model identifier to be served by vLLM.
- backend: str
- Determines where and how to run the vLLM process.
- openai_api_server: bool
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
- Default is False (uses native engine).
- Set to True for backward compatibility with existing code.
- debug: bool
- Whether to turn on verbose debugging logs.
- card_refresh_interval: int
- Interval in seconds for refreshing the vLLM status card.
- Only used when openai_api_server=True.
- max_retries: int
- Maximum number of retries checking for vLLM server startup.
- Only used when openai_api_server=True.
- retry_alert_frequency: int
- Frequency of alert logs for vLLM server startup retries.
- Only used when openai_api_server=True.
- engine_args : dict
- Additional keyword arguments to pass to the vLLM engine.
- For example, `tensor_parallel_size=2`.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
+ `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+ It exists to make it easier for users to know that this decorator should only be used with
+ a Neo Cloud like Nebius. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
+
+
  Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
  for S3 read and write requests.

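The hunk above mostly reshuffles and re-documents the auto-generated stubs for step-level decorators such as `@resources`, `@conda`, `@ollama`, `@s3_proxy`, and `@card`. As a hedged illustration of how a few of these decorators are typically combined in a flow, based only on the signatures shown in this diff (the flow name, step bodies, package pins, and resource sizes below are invented for illustration and are not part of the package):

```python
# Hedged sketch only: shows typical usage of @resources, @conda, and @card,
# whose stubs appear in the hunk above. Names and values are illustrative.
from metaflow import FlowSpec, step, resources, conda, card


class ExampleTrainFlow(FlowSpec):

    @card(type="default", timeout=45)                            # attach a Metaflow Card to this step
    @resources(cpu=2, memory=8192, gpu=1)                        # resource hints, independent of @batch/@kubernetes
    @conda(packages={"scikit-learn": "1.5.1"}, python="3.11")    # step-level Conda environment
    @step
    def start(self):
        # Training logic would go here; the decorators only shape how the step runs.
        self.trained = True
        self.next(self.end)

    @step
    def end(self):
        print("done:", self.trained)


if __name__ == "__main__":
    ExampleTrainFlow()
```

As the `@resources` docstring above notes, such a flow can be run locally or routed to a compute layer on the command line, e.g. `python example_train_flow.py run --with kubernetes`.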
@@ -632,19 +523,41 @@ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typin
632
523
  ...
633
524
 
634
525
  @typing.overload
635
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
636
- """
637
- Internal decorator to support Fast bakery
526
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
638
527
  """
639
- ...
528
+ Specifies secrets to be retrieved and injected as environment variables prior to
529
+ the execution of a step.
530
+
531
+
532
+ Parameters
533
+ ----------
534
+ sources : List[Union[str, Dict[str, Any]]], default: []
535
+ List of secret specs, defining how the secrets are to be retrieved
536
+ role : str, optional, default: None
537
+ Role to use for fetching secrets
538
+ """
539
+ ...
640
540
 
641
541
  @typing.overload
642
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
542
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
643
543
  ...
644
544
 
645
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
545
+ @typing.overload
546
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
547
+ ...
548
+
549
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
646
550
  """
647
- Internal decorator to support Fast bakery
551
+ Specifies secrets to be retrieved and injected as environment variables prior to
552
+ the execution of a step.
553
+
554
+
555
+ Parameters
556
+ ----------
557
+ sources : List[Union[str, Dict[str, Any]]], default: []
558
+ List of secret specs, defining how the secrets are to be retrieved
559
+ role : str, optional, default: None
560
+ Role to use for fetching secrets
648
561
  """
649
562
  ...
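As a usage illustration of the `@secrets` parameters documented above, a hedged sketch in which the secret source name and the resulting environment variable are placeholders, not real defaults:

```python
import os
from metaflow import secrets, step

@secrets(sources=["db-credentials"])  # placeholder secret spec; real names depend on your secrets backend
@step
def query(self):
    # the retrieved secret is injected as environment variables before the step body runs
    password = os.environ["DB_PASSWORD"]  # hypothetical variable defined by the secret
    ...
```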
650
563
 
@@ -765,198 +678,193 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope:
765
678
  """
766
679
  ...
767
680
 
681
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
682
+ """
683
+ Specifies that this step should execute on DGX cloud.
684
+
685
+
686
+ Parameters
687
+ ----------
688
+ gpu : int
689
+ Number of GPUs to use.
690
+ gpu_type : str
691
+ Type of Nvidia GPU to use.
692
+ queue_timeout : int
693
+ Time to keep the job in NVCF's queue.
694
+ """
695
+ ...
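A minimal sketch of the `@nvidia` parameters above; the GPU type string and queue timeout are illustrative values, not defaults, and the import assumes the decorator is exposed at the top level as this stub suggests:

```python
from metaflow import nvidia, step

@nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)  # hypothetical GPU type; timeout in seconds
@step
def train(self):
    # runs on DGX Cloud with the requested accelerator
    ...
```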
696
+
768
697
  @typing.overload
769
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
698
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
770
699
  """
771
- Specifies the resources needed when executing this step.
700
+ Enables checkpointing for a step.
772
701
 
773
- Use `@resources` to specify the resource requirements
774
- independently of the specific compute layer (`@batch`, `@kubernetes`).
702
+ > Examples
775
703
 
776
- You can choose the compute layer on the command line by executing e.g.
777
- ```
778
- python myflow.py run --with batch
779
- ```
780
- or
704
+ - Saving Checkpoints
705
+
706
+ ```python
707
+ @checkpoint
708
+ @step
709
+ def train(self):
710
+ model = create_model(self.parameters, checkpoint_path = None)
711
+ for i in range(self.epochs):
712
+ # some training logic
713
+ loss = model.train(self.dataset)
714
+ if i % 10 == 0:
715
+ model.save(
716
+ current.checkpoint.directory,
717
+ )
718
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
719
+ # and returns a reference dictionary to the checkpoint saved in the datastore
720
+ self.latest_checkpoint = current.checkpoint.save(
721
+ name="epoch_checkpoint",
722
+ metadata={
723
+ "epoch": i,
724
+ "loss": loss,
725
+ }
726
+ )
781
727
  ```
782
- python myflow.py run --with kubernetes
728
+
729
+ - Using Loaded Checkpoints
730
+
731
+ ```python
732
+ @retry(times=3)
733
+ @checkpoint
734
+ @step
735
+ def train(self):
736
+ # Assume that the task has restarted and the previous attempt of the task
737
+ # saved a checkpoint
738
+ checkpoint_path = None
739
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
740
+ print("Loaded checkpoint from the previous attempt")
741
+ checkpoint_path = current.checkpoint.directory
742
+
743
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
744
+ for i in range(self.epochs):
745
+ ...
783
746
  ```
784
- which executes the flow on the desired system using the
785
- requirements specified in `@resources`.
786
747
 
787
748
 
788
749
  Parameters
789
750
  ----------
790
- cpu : int, default 1
791
- Number of CPUs required for this step.
792
- gpu : int, optional, default None
793
- Number of GPUs required for this step.
794
- disk : int, optional, default None
795
- Disk size (in MB) required for this step. Only applies on Kubernetes.
796
- memory : int, default 4096
797
- Memory size (in MB) required for this step.
798
- shared_memory : int, optional, default None
799
- The value for the size (in MiB) of the /dev/shm volume for this step.
800
- This parameter maps to the `--shm-size` option in Docker.
751
+ load_policy : str, default: "fresh"
752
+ The policy for loading the checkpoint. The following policies are supported:
753
+ - "eager": Loads the the latest available checkpoint within the namespace.
754
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
755
+ will be loaded at the start of the task.
756
+ - "none": Do not load any checkpoint
757
+ - "fresh": Loads the lastest checkpoint created within the running Task.
758
+ This mode helps load checkpoints across the various retry attempts of the same task.
759
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
760
+ created within the task will be loaded when the task retries execution after a failure.
761
+
762
+ temp_dir_root : str, default: None
763
+ The root directory under which `current.checkpoint.directory` will be created.
801
764
  """
802
765
  ...
803
766
 
804
767
  @typing.overload
805
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
768
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
806
769
  ...
807
770
 
808
771
  @typing.overload
809
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
772
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
810
773
  ...
811
774
 
812
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
775
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
813
776
  """
814
- Specifies the resources needed when executing this step.
777
+ Enables checkpointing for a step.
815
778
 
816
- Use `@resources` to specify the resource requirements
817
- independently of the specific compute layer (`@batch`, `@kubernetes`).
779
+ > Examples
818
780
 
819
- You can choose the compute layer on the command line by executing e.g.
820
- ```
821
- python myflow.py run --with batch
822
- ```
823
- or
781
+ - Saving Checkpoints
782
+
783
+ ```python
784
+ @checkpoint
785
+ @step
786
+ def train(self):
787
+ model = create_model(self.parameters, checkpoint_path = None)
788
+ for i in range(self.epochs):
789
+ # some training logic
790
+ loss = model.train(self.dataset)
791
+ if i % 10 == 0:
792
+ model.save(
793
+ current.checkpoint.directory,
794
+ )
795
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
796
+ # and returns a reference dictionary to the checkpoint saved in the datastore
797
+ self.latest_checkpoint = current.checkpoint.save(
798
+ name="epoch_checkpoint",
799
+ metadata={
800
+ "epoch": i,
801
+ "loss": loss,
802
+ }
803
+ )
824
804
  ```
825
- python myflow.py run --with kubernetes
805
+
806
+ - Using Loaded Checkpoints
807
+
808
+ ```python
809
+ @retry(times=3)
810
+ @checkpoint
811
+ @step
812
+ def train(self):
813
+ # Assume that the task has restarted and the previous attempt of the task
814
+ # saved a checkpoint
815
+ checkpoint_path = None
816
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
817
+ print("Loaded checkpoint from the previous attempt")
818
+ checkpoint_path = current.checkpoint.directory
819
+
820
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
821
+ for i in range(self.epochs):
822
+ ...
826
823
  ```
827
- which executes the flow on the desired system using the
828
- requirements specified in `@resources`.
829
824
 
830
825
 
831
826
  Parameters
832
827
  ----------
833
- cpu : int, default 1
834
- Number of CPUs required for this step.
835
- gpu : int, optional, default None
836
- Number of GPUs required for this step.
837
- disk : int, optional, default None
838
- Disk size (in MB) required for this step. Only applies on Kubernetes.
839
- memory : int, default 4096
840
- Memory size (in MB) required for this step.
841
- shared_memory : int, optional, default None
842
- The value for the size (in MiB) of the /dev/shm volume for this step.
843
- This parameter maps to the `--shm-size` option in Docker.
828
+ load_policy : str, default: "fresh"
829
+ The policy for loading the checkpoint. The following policies are supported:
830
+ - "eager": Loads the the latest available checkpoint within the namespace.
831
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
832
+ will be loaded at the start of the task.
833
+ - "none": Do not load any checkpoint
834
+ - "fresh": Loads the lastest checkpoint created within the running Task.
835
+ This mode helps load checkpoints across the various retry attempts of the same task.
836
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
837
+ created within the task will be loaded when the task retries execution after a failure.
838
+
839
+ temp_dir_root : str, default: None
840
+ The root directory under which `current.checkpoint.directory` will be created.
844
841
  """
845
842
  ...
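The `@resources` docstring above stresses that requirements are declared once, independently of the compute layer picked on the command line. A hedged sketch under that assumption, with purely illustrative sizes:

```python
from metaflow import resources, step

@resources(cpu=4, memory=16000, gpu=1)  # illustrative sizing; memory in MB
@step
def train(self):
    ...
```

Running `python myflow.py run --with kubernetes` (or `--with batch`) would then apply these requirements on the chosen system, as described above.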
846
843
 
847
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
844
+ @typing.overload
845
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
848
846
  """
849
- Specifies that this step should execute on Kubernetes.
850
-
851
-
852
- Parameters
853
- ----------
854
- cpu : int, default 1
855
- Number of CPUs required for this step. If `@resources` is
856
- also present, the maximum value from all decorators is used.
857
- memory : int, default 4096
858
- Memory size (in MB) required for this step. If
859
- `@resources` is also present, the maximum value from all decorators is
860
- used.
861
- disk : int, default 10240
862
- Disk size (in MB) required for this step. If
863
- `@resources` is also present, the maximum value from all decorators is
864
- used.
865
- image : str, optional, default None
866
- Docker image to use when launching on Kubernetes. If not specified, and
867
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
868
- not, a default Docker image mapping to the current version of Python is used.
869
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
870
- If given, the imagePullPolicy to be applied to the Docker image of the step.
871
- image_pull_secrets: List[str], default []
872
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
873
- Kubernetes image pull secrets to use when pulling container images
874
- in Kubernetes.
875
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
876
- Kubernetes service account to use when launching pod in Kubernetes.
877
- secrets : List[str], optional, default None
878
- Kubernetes secrets to use when launching pod in Kubernetes. These
879
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
880
- in Metaflow configuration.
881
- node_selector: Union[Dict[str,str], str], optional, default None
882
- Kubernetes node selector(s) to apply to the pod running the task.
883
- Can be passed in as a comma separated string of values e.g.
884
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
885
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
886
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
887
- Kubernetes namespace to use when launching pod in Kubernetes.
888
- gpu : int, optional, default None
889
- Number of GPUs required for this step. A value of zero implies that
890
- the scheduled node should not have GPUs.
891
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
892
- The vendor of the GPUs to be used for this step.
893
- tolerations : List[Dict[str,str]], default []
894
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
895
- Kubernetes tolerations to use when launching pod in Kubernetes.
896
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
897
- Kubernetes labels to use when launching pod in Kubernetes.
898
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
899
- Kubernetes annotations to use when launching pod in Kubernetes.
900
- use_tmpfs : bool, default False
901
- This enables an explicit tmpfs mount for this step.
902
- tmpfs_tempdir : bool, default True
903
- Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
904
- tmpfs_size : int, optional, default: None
905
- The value for the size (in MiB) of the tmpfs mount for this step.
906
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
907
- memory allocated for this step.
908
- tmpfs_path : str, optional, default /metaflow_temp
909
- Path to tmpfs mount for this step.
910
- persistent_volume_claims : Dict[str, str], optional, default None
911
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
912
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
913
- shared_memory: int, optional
914
- Shared memory size (in MiB) required for this step
915
- port: int, optional
916
- Port number to specify in the Kubernetes job object
917
- compute_pool : str, optional, default None
918
- Compute pool to be used for this step.
919
- If not specified, any accessible compute pool within the perimeter is used.
920
- hostname_resolution_timeout: int, default 10 * 60
921
- Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
922
- Only applicable when @parallel is used.
923
- qos: str, default: Burstable
924
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
925
-
926
- security_context: Dict[str, Any], optional, default None
927
- Container security context. Applies to the task container. Allows the following keys:
928
- - privileged: bool, optional, default None
929
- - allow_privilege_escalation: bool, optional, default None
930
- - run_as_user: int, optional, default None
931
- - run_as_group: int, optional, default None
932
- - run_as_non_root: bool, optional, default None
933
- """
934
- ...
935
-
936
- @typing.overload
937
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
938
- """
939
- A simple decorator that demonstrates using CardDecoratorInjector
940
- to inject a card and render simple markdown content.
941
- """
942
- ...
943
-
944
- @typing.overload
945
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
946
- ...
947
-
948
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
949
- """
950
- A simple decorator that demonstrates using CardDecoratorInjector
951
- to inject a card and render simple markdown content.
952
- """
953
- ...
954
-
955
- def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
956
- """
957
- `@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
958
- It exists to make it easier for users to know that this decorator should only be used with
959
- a Neo Cloud like CoreWeave. The underlying mechanics of the decorator are the same as `@s3_proxy`:
847
+ A simple decorator that demonstrates using CardDecoratorInjector
848
+ to inject a card and render simple markdown content.
849
+ """
850
+ ...
851
+
852
+ @typing.overload
853
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
854
+ ...
855
+
856
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
857
+ """
858
+ A simple decorator that demonstrates using CardDecoratorInjector
859
+ to inject a card and render simple markdown content.
860
+ """
861
+ ...
862
+
863
+ def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
864
+ """
865
+ `@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
866
+ It exists to make it easier for users to know that this decorator should only be used with
867
+ a Neo Cloud like CoreWeave. The underlying mechanics of the decorator are the same as `@s3_proxy`:
960
868
 
961
869
 
962
870
  Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
@@ -1016,641 +924,482 @@ def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_m
1016
924
  """
1017
925
  ...
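A hedged sketch of the `@coreweave_s3_proxy` decorator described above; the integration name and bucket are placeholders, `write_mode` takes the values listed in the docstring, and the import assumes the decorator is exposed at the top level as in this stub:

```python
from metaflow import S3, coreweave_s3_proxy, step

@coreweave_s3_proxy(integration_name="my-coreweave-cache",  # placeholder integration
                    write_mode="origin-and-cache")
@step
def load_data(self):
    # reads are transparently served from (and cached in) the external bucket
    with S3(s3root="s3://source-bucket/data/") as s3:  # hypothetical origin bucket
        blob = s3.get("train.csv").blob
    ...
```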
1018
926
 
1019
- @typing.overload
1020
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
927
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1021
928
  """
1022
- Specifies a timeout for your step.
929
+ Specifies that this step should execute on DGX cloud.
1023
930
 
1024
- This decorator is useful if this step may hang indefinitely.
1025
931
 
1026
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1027
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1028
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
932
+ Parameters
933
+ ----------
934
+ gpu : int
935
+ Number of GPUs to use.
936
+ gpu_type : str
937
+ Type of Nvidia GPU to use.
938
+ """
939
+ ...
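A hedged sketch of `@nvct`, which takes only a GPU count and type (values illustrative; import assumed from the top-level package as in this stub):

```python
from metaflow import nvct, step

@nvct(gpu=8, gpu_type="H100")  # hypothetical GPU type
@step
def train(self):
    ...
```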
940
+
941
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
942
+ """
943
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
1029
944
 
1030
- Note that all the values specified in parameters are added together so if you specify
1031
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
945
+ User code call
946
+ --------------
947
+ @vllm(
948
+ model="...",
949
+ ...
950
+ )
951
+
952
+ Valid backend options
953
+ ---------------------
954
+ - 'local': Run as a separate process on the local task machine.
955
+
956
+ Valid model options
957
+ -------------------
958
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
959
+
960
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
961
+ If you need multiple models, you must create multiple @vllm decorators.
1032
962
 
1033
963
 
1034
964
  Parameters
1035
965
  ----------
1036
- seconds : int, default 0
1037
- Number of seconds to wait prior to timing out.
1038
- minutes : int, default 0
1039
- Number of minutes to wait prior to timing out.
1040
- hours : int, default 0
1041
- Number of hours to wait prior to timing out.
966
+ model: str
967
+ HuggingFace model identifier to be served by vLLM.
968
+ backend: str
969
+ Determines where and how to run the vLLM process.
970
+ openai_api_server: bool
971
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
972
+ Default is False (uses native engine).
973
+ Set to True for backward compatibility with existing code.
974
+ debug: bool
975
+ Whether to turn on verbose debugging logs.
976
+ card_refresh_interval: int
977
+ Interval in seconds for refreshing the vLLM status card.
978
+ Only used when openai_api_server=True.
979
+ max_retries: int
980
+ Maximum number of retries checking for vLLM server startup.
981
+ Only used when openai_api_server=True.
982
+ retry_alert_frequency: int
983
+ Frequency of alert logs for vLLM server startup retries.
984
+ Only used when openai_api_server=True.
985
+ engine_args : dict
986
+ Additional keyword arguments to pass to the vLLM engine.
987
+ For example, `tensor_parallel_size=2`.
1042
988
  """
1043
989
  ...
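Putting the `@vllm` parameters above together, a hedged sketch; the model id and `tensor_parallel_size` example come from the docstring itself, and how the served model is consumed inside the step is intentionally left abstract:

```python
from metaflow import step, vllm  # assuming top-level exposure as in this stub

@vllm(model="meta-llama/Llama-3.2-1B",
      backend="local",
      openai_api_server=True,  # run the OpenAI-compatible server as a subprocess
      engine_args={"tensor_parallel_size": 2})
@step
def generate(self):
    # the sidecar serves exactly one model; query it with whatever client the flow uses
    ...
```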
1044
990
 
1045
991
  @typing.overload
1046
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
992
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
993
+ """
994
+ Decorator prototype for all step decorators. This function gets specialized
995
+ and imported for all decorator types by _import_plugin_decorators().
996
+ """
1047
997
  ...
1048
998
 
1049
999
  @typing.overload
1050
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1000
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1051
1001
  ...
1052
1002
 
1053
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1003
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1054
1004
  """
1055
- Specifies a timeout for your step.
1056
-
1057
- This decorator is useful if this step may hang indefinitely.
1058
-
1059
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1060
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1061
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1062
-
1063
- Note that all the values specified in parameters are added together so if you specify
1064
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1065
-
1066
-
1067
- Parameters
1068
- ----------
1069
- seconds : int, default 0
1070
- Number of seconds to wait prior to timing out.
1071
- minutes : int, default 0
1072
- Number of minutes to wait prior to timing out.
1073
- hours : int, default 0
1074
- Number of hours to wait prior to timing out.
1005
+ Decorator prototype for all step decorators. This function gets specialized
1006
+ and imported for all decorator types by _import_plugin_decorators().
1075
1007
  """
1076
1008
  ...
1077
1009
 
1078
1010
  @typing.overload
1079
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1011
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1080
1012
  """
1081
- Creates a human-readable report, a Metaflow Card, after this step completes.
1013
+ Enables loading / saving of models within a step.
1082
1014
 
1083
- Note that you may add multiple `@card` decorators in a step with different parameters.
1015
+ > Examples
1016
+ - Saving Models
1017
+ ```python
1018
+ @model
1019
+ @step
1020
+ def train(self):
1021
+ # current.model.save returns a dictionary reference to the model saved
1022
+ self.my_model = current.model.save(
1023
+ path_to_my_model,
1024
+ label="my_model",
1025
+ metadata={
1026
+ "epochs": 10,
1027
+ "batch-size": 32,
1028
+ "learning-rate": 0.001,
1029
+ }
1030
+ )
1031
+ self.next(self.test)
1032
+
1033
+ @model(load="my_model")
1034
+ @step
1035
+ def test(self):
1036
+ # `current.model.loaded` returns a dictionary of the loaded models
1037
+ # where the key is the name of the artifact and the value is the path to the model
1038
+ print(os.listdir(current.model.loaded["my_model"]))
1039
+ self.next(self.end)
1040
+ ```
1041
+
1042
+ - Loading models
1043
+ ```python
1044
+ @step
1045
+ def train(self):
1046
+ # current.model.load returns the path to the model loaded
1047
+ checkpoint_path = current.model.load(
1048
+ self.checkpoint_key,
1049
+ )
1050
+ model_path = current.model.load(
1051
+ self.model,
1052
+ )
1053
+ self.next(self.test)
1054
+ ```
1084
1055
 
1085
1056
 
1086
1057
  Parameters
1087
1058
  ----------
1088
- type : str, default 'default'
1089
- Card type.
1090
- id : str, optional, default None
1091
- If multiple cards are present, use this id to identify this card.
1092
- options : Dict[str, Any], default {}
1093
- Options passed to the card. The contents depend on the card type.
1094
- timeout : int, default 45
1095
- Interrupt reporting if it takes more than this many seconds.
1059
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1060
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1061
+ The artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1062
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact will be unpacked on
1063
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1064
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1065
+
1066
+ temp_dir_root : str, default: None
1067
+ The root directory under which `current.model.loaded` will store loaded models
1096
1068
  """
1097
1069
  ...
1098
1070
 
1099
1071
  @typing.overload
1100
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1072
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1101
1073
  ...
1102
1074
 
1103
1075
  @typing.overload
1104
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1076
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1105
1077
  ...
1106
1078
 
1107
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1108
- """
1109
- Creates a human-readable report, a Metaflow Card, after this step completes.
1110
-
1111
- Note that you may add multiple `@card` decorators in a step with different parameters.
1112
-
1113
-
1114
- Parameters
1115
- ----------
1116
- type : str, default 'default'
1117
- Card type.
1118
- id : str, optional, default None
1119
- If multiple cards are present, use this id to identify this card.
1120
- options : Dict[str, Any], default {}
1121
- Options passed to the card. The contents depend on the card type.
1122
- timeout : int, default 45
1123
- Interrupt reporting if it takes more than this many seconds.
1124
- """
1125
- ...
1126
-
1127
- def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1128
- """
1129
- `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1130
- It exists to make it easier for users to know that this decorator should only be used with
1131
- a Neo Cloud like Nebius. The underlying mechanics of the decorator are the same as `@s3_proxy`:
1132
-
1133
-
1134
- Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
1135
- for S3 read and write requests.
1136
-
1137
- This decorator requires an integration in the Outerbounds platform that
1138
- points to an external bucket. It affects S3 operations performed via
1139
- Metaflow's `get_aws_client` and `S3` within a `@step`.
1140
-
1141
- Read operations
1142
- ---------------
1143
- All read operations pass through the proxy. If an object does not already
1144
- exist in the external bucket, it is cached there. For example, if code reads
1145
- from buckets `FOO` and `BAR` using the `S3` interface, objects from both
1146
- buckets are cached in the external bucket.
1147
-
1148
- During task execution, all S3‑related read requests are routed through the
1149
- proxy:
1150
- - If the object is present in the external object store, the proxy
1151
- streams it directly from there without accessing the requested origin
1152
- bucket.
1153
- - If the object is not present in the external storage, the proxy
1154
- fetches it from the requested bucket, caches it in the external
1155
- storage, and streams the response from the origin bucket.
1156
-
1157
- Warning
1158
- -------
1159
- All READ operations (e.g., GetObject, HeadObject) pass through the external
1160
- bucket regardless of the bucket specified in user code. Even
1161
- `S3(run=self)` and `S3(s3root="mybucketfoo")` requests go through the
1162
- external bucket cache.
1163
-
1164
- Write operations
1165
- ----------------
1166
- Write behavior is controlled by the `write_mode` parameter, which determines
1167
- whether writes also persist objects in the cache.
1168
-
1169
- `write_mode` values:
1170
- - `origin-and-cache`: objects are written both to the cache and to their
1171
- intended origin bucket.
1172
- - `origin`: objects are written only to their intended origin bucket.
1173
-
1174
-
1175
- Parameters
1176
- ----------
1177
- integration_name : str, optional
1178
- [Outerbounds integration name](https://docs.outerbounds.com/outerbounds/configuring-secrets/#integrations-view)
1179
- that holds the configuration for the external, S3‑compatible object
1180
- storage bucket. If not specified, the only available S3 proxy
1181
- integration in the namespace is used (fails if multiple exist).
1182
- write_mode : str, optional
1183
- Controls whether writes also go to the external bucket.
1184
- - `origin` (default)
1185
- - `origin-and-cache`
1186
- debug : bool, optional
1187
- Enables debug logging for proxy operations.
1188
- """
1189
- ...
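The write-mode behaviour described above is easiest to see in a sketch; the integration name and bucket paths are placeholders, and the import assumes top-level exposure as in this stub:

```python
from metaflow import S3, nebius_s3_proxy, step

@nebius_s3_proxy(integration_name="my-nebius-cache",  # placeholder integration
                 write_mode="origin-and-cache")
@step
def etl(self):
    with S3(s3root="s3://origin-bucket/") as s3:  # hypothetical origin bucket
        raw = s3.get("input.json").text    # read is routed through the external cache
        s3.put("output.json", raw.upper()) # write lands in the origin bucket and the cache
    ...
```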
1190
-
1191
- @typing.overload
1192
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1193
- """
1194
- Specifies that the step will succeed under all circumstances.
1195
-
1196
- The decorator will create an optional artifact, specified by `var`, which
1197
- contains the exception raised. You can use it to detect the presence
1198
- of errors, indicating that all happy-path artifacts produced by the step
1199
- are missing.
1200
-
1201
-
1202
- Parameters
1203
- ----------
1204
- var : str, optional, default None
1205
- Name of the artifact in which to store the caught exception.
1206
- If not specified, the exception is not stored.
1207
- print_exception : bool, default True
1208
- Determines whether or not the exception is printed to
1209
- stdout when caught.
1210
- """
1211
- ...
1212
-
1213
- @typing.overload
1214
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1215
- ...
1216
-
1217
- @typing.overload
1218
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1219
- ...
1220
-
1221
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1079
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1222
1080
  """
1223
- Specifies that the step will succeed under all circumstances.
1224
-
1225
- The decorator will create an optional artifact, specified by `var`, which
1226
- contains the exception raised. You can use it to detect the presence
1227
- of errors, indicating that all happy-path artifacts produced by the step
1228
- are missing.
1229
-
1081
+ Enables loading / saving of models within a step.
1230
1082
 
1231
- Parameters
1232
- ----------
1233
- var : str, optional, default None
1234
- Name of the artifact in which to store the caught exception.
1235
- If not specified, the exception is not stored.
1236
- print_exception : bool, default True
1237
- Determines whether or not the exception is printed to
1238
- stdout when caught.
1239
- """
1240
- ...
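As a usage note for the `@catch` parameters above, a hedged sketch in which a failure is captured into an artifact instead of failing the run; the artifact name and the failing call are illustrative:

```python
from metaflow import catch, step

@catch(var="train_error", print_exception=True)  # artifact name is illustrative
@step
def train(self):
    risky_training_call()  # hypothetical call that may raise
    self.next(self.report)

@step
def report(self):
    # the artifact holds the caught exception when train() failed
    if getattr(self, "train_error", None):
        print("training failed:", self.train_error)
    self.next(self.end)
```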
1241
-
1242
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1243
- """
1244
- Specifies that this step should execute on DGX cloud.
1083
+ > Examples
1084
+ - Saving Models
1085
+ ```python
1086
+ @model
1087
+ @step
1088
+ def train(self):
1089
+ # current.model.save returns a dictionary reference to the model saved
1090
+ self.my_model = current.model.save(
1091
+ path_to_my_model,
1092
+ label="my_model",
1093
+ metadata={
1094
+ "epochs": 10,
1095
+ "batch-size": 32,
1096
+ "learning-rate": 0.001,
1097
+ }
1098
+ )
1099
+ self.next(self.test)
1245
1100
 
1101
+ @model(load="my_model")
1102
+ @step
1103
+ def test(self):
1104
+ # `current.model.loaded` returns a dictionary of the loaded models
1105
+ # where the key is the name of the artifact and the value is the path to the model
1106
+ print(os.listdir(current.model.loaded["my_model"]))
1107
+ self.next(self.end)
1108
+ ```
1246
1109
 
1247
- Parameters
1248
- ----------
1249
- gpu : int
1250
- Number of GPUs to use.
1251
- gpu_type : str
1252
- Type of Nvidia GPU to use.
1253
- queue_timeout : int
1254
- Time to keep the job in NVCF's queue.
1255
- """
1256
- ...
1257
-
1258
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1259
- """
1260
- Specifies that this step should execute on DGX cloud.
1110
+ - Loading models
1111
+ ```python
1112
+ @step
1113
+ def train(self):
1114
+ # current.model.load returns the path to the model loaded
1115
+ checkpoint_path = current.model.load(
1116
+ self.checkpoint_key,
1117
+ )
1118
+ model_path = current.model.load(
1119
+ self.model,
1120
+ )
1121
+ self.next(self.test)
1122
+ ```
1261
1123
 
1262
1124
 
1263
1125
  Parameters
1264
1126
  ----------
1265
- gpu : int
1266
- Number of GPUs to use.
1267
- gpu_type : str
1268
- Type of Nvidia GPU to use.
1269
- """
1270
- ...
1271
-
1272
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1273
- """
1274
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
1275
-
1276
- User code call
1277
- --------------
1278
- @ollama(
1279
- models=[...],
1280
- ...
1281
- )
1282
-
1283
- Valid backend options
1284
- ---------------------
1285
- - 'local': Run as a separate process on the local task machine.
1286
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1287
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1288
-
1289
- Valid model options
1290
- -------------------
1291
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1292
-
1127
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1128
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1129
+ The artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1130
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact will be unpacked on
1131
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1132
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1293
1133
 
1294
- Parameters
1295
- ----------
1296
- models: list[str]
1297
- List of Ollama containers running models in sidecars.
1298
- backend: str
1299
- Determines where and how to run the Ollama process.
1300
- force_pull: bool
1301
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1302
- cache_update_policy: str
1303
- Cache update policy: "auto", "force", or "never".
1304
- force_cache_update: bool
1305
- Simple override for "force" cache update policy.
1306
- debug: bool
1307
- Whether to turn on verbose debugging logs.
1308
- circuit_breaker_config: dict
1309
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1310
- timeout_config: dict
1311
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1134
+ temp_dir_root : str, default: None
1135
+ The root directory under which `current.model.loaded` will store loaded models
1312
1136
  """
1313
1137
  ...
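For the `@ollama` sidecar documented above, a hedged sketch; the model name comes from the docstring's examples, and how the local Ollama endpoint is queried is left to the client of choice:

```python
from metaflow import ollama, step  # assuming top-level exposure as in this stub

@ollama(models=["llama3.2"], backend="local")
@step
def summarize(self):
    # an Ollama server with the requested model runs alongside this task;
    # call it with any Ollama-compatible client
    ...
```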
1314
1138
 
1315
- @typing.overload
1316
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1139
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1317
1140
  """
1318
- Specifies the PyPI packages for the step.
1319
-
1320
- Information in this decorator will augment any
1321
- attributes set in the `@pypi_base` flow-level decorator. Hence,
1322
- you can use `@pypi_base` to set packages required by all
1323
- steps and use `@pypi` to specify step-specific overrides.
1141
+ Specifies that this step should execute on Kubernetes.
1324
1142
 
1325
1143
 
1326
1144
  Parameters
1327
1145
  ----------
1328
- packages : Dict[str, str], default: {}
1329
- Packages to use for this step. The key is the name of the package
1330
- and the value is the version to use.
1331
- python : str, optional, default: None
1332
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1333
- that the version used will correspond to the version of the Python interpreter used to start the run.
1334
- """
1335
- ...
1336
-
1337
- @typing.overload
1338
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1339
- ...
1340
-
1341
- @typing.overload
1342
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1343
- ...
1344
-
1345
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1346
- """
1347
- Specifies the PyPI packages for the step.
1348
-
1349
- Information in this decorator will augment any
1350
- attributes set in the `@pypi_base` flow-level decorator. Hence,
1351
- you can use `@pypi_base` to set packages required by all
1352
- steps and use `@pypi` to specify step-specific overrides.
1353
-
1146
+ cpu : int, default 1
1147
+ Number of CPUs required for this step. If `@resources` is
1148
+ also present, the maximum value from all decorators is used.
1149
+ memory : int, default 4096
1150
+ Memory size (in MB) required for this step. If
1151
+ `@resources` is also present, the maximum value from all decorators is
1152
+ used.
1153
+ disk : int, default 10240
1154
+ Disk size (in MB) required for this step. If
1155
+ `@resources` is also present, the maximum value from all decorators is
1156
+ used.
1157
+ image : str, optional, default None
1158
+ Docker image to use when launching on Kubernetes. If not specified, and
1159
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1160
+ not, a default Docker image mapping to the current version of Python is used.
1161
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1162
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
1163
+ image_pull_secrets: List[str], default []
1164
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
1165
+ Kubernetes image pull secrets to use when pulling container images
1166
+ in Kubernetes.
1167
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1168
+ Kubernetes service account to use when launching pod in Kubernetes.
1169
+ secrets : List[str], optional, default None
1170
+ Kubernetes secrets to use when launching pod in Kubernetes. These
1171
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1172
+ in Metaflow configuration.
1173
+ node_selector: Union[Dict[str,str], str], optional, default None
1174
+ Kubernetes node selector(s) to apply to the pod running the task.
1175
+ Can be passed in as a comma separated string of values e.g.
1176
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
1177
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
1178
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1179
+ Kubernetes namespace to use when launching pod in Kubernetes.
1180
+ gpu : int, optional, default None
1181
+ Number of GPUs required for this step. A value of zero implies that
1182
+ the scheduled node should not have GPUs.
1183
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1184
+ The vendor of the GPUs to be used for this step.
1185
+ tolerations : List[Dict[str,str]], default []
1186
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1187
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1188
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
1189
+ Kubernetes labels to use when launching pod in Kubernetes.
1190
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
1191
+ Kubernetes annotations to use when launching pod in Kubernetes.
1192
+ use_tmpfs : bool, default False
1193
+ This enables an explicit tmpfs mount for this step.
1194
+ tmpfs_tempdir : bool, default True
1195
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1196
+ tmpfs_size : int, optional, default: None
1197
+ The value for the size (in MiB) of the tmpfs mount for this step.
1198
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1199
+ memory allocated for this step.
1200
+ tmpfs_path : str, optional, default /metaflow_temp
1201
+ Path to tmpfs mount for this step.
1202
+ persistent_volume_claims : Dict[str, str], optional, default None
1203
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1204
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1205
+ shared_memory: int, optional
1206
+ Shared memory size (in MiB) required for this step
1207
+ port: int, optional
1208
+ Port number to specify in the Kubernetes job object
1209
+ compute_pool : str, optional, default None
1210
+ Compute pool to be used for this step.
1211
+ If not specified, any accessible compute pool within the perimeter is used.
1212
+ hostname_resolution_timeout: int, default 10 * 60
1213
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
1214
+ Only applicable when @parallel is used.
1215
+ qos: str, default: Burstable
1216
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1354
1217
 
1355
- Parameters
1356
- ----------
1357
- packages : Dict[str, str], default: {}
1358
- Packages to use for this step. The key is the name of the package
1359
- and the value is the version to use.
1360
- python : str, optional, default: None
1361
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1362
- that the version used will correspond to the version of the Python interpreter used to start the run.
1218
+ security_context: Dict[str, Any], optional, default None
1219
+ Container security context. Applies to the task container. Allows the following keys:
1220
+ - privileged: bool, optional, default None
1221
+ - allow_privilege_escalation: bool, optional, default None
1222
+ - run_as_user: int, optional, default None
1223
+ - run_as_group: int, optional, default None
1224
+ - run_as_non_root: bool, optional, default None
1363
1225
  """
1364
1226
  ...
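Because the `@kubernetes` and `@pypi` docstrings above both apply per step, a combined, hedged sketch; the package pins and resource sizes are illustrative, not defaults:

```python
from metaflow import kubernetes, pypi, step

@kubernetes(cpu=2, memory=8192, gpu=1, qos="Burstable")
@pypi(packages={"pandas": "2.2.2"}, python="3.11")  # illustrative pins
@step
def featurize(self):
    import pandas as pd  # available inside the step's isolated environment
    ...
```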
1365
1227
 
1366
1228
  @typing.overload
1367
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1229
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1368
1230
  """
1369
- Enables checkpointing for a step.
1370
-
1371
- > Examples
1372
-
1373
- - Saving Checkpoints
1374
-
1375
- ```python
1376
- @checkpoint
1377
- @step
1378
- def train(self):
1379
- model = create_model(self.parameters, checkpoint_path = None)
1380
- for i in range(self.epochs):
1381
- # some training logic
1382
- loss = model.train(self.dataset)
1383
- if i % 10 == 0:
1384
- model.save(
1385
- current.checkpoint.directory,
1386
- )
1387
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
1388
- # and returns a reference dictionary to the checkpoint saved in the datastore
1389
- self.latest_checkpoint = current.checkpoint.save(
1390
- name="epoch_checkpoint",
1391
- metadata={
1392
- "epoch": i,
1393
- "loss": loss,
1394
- }
1395
- )
1396
- ```
1231
+ Specifies a timeout for your step.
1397
1232
 
1398
- - Using Loaded Checkpoints
1233
+ This decorator is useful if this step may hang indefinitely.
1399
1234
 
1400
- ```python
1401
- @retry(times=3)
1402
- @checkpoint
1403
- @step
1404
- def train(self):
1405
- # Assume that the task has restarted and the previous attempt of the task
1406
- # saved a checkpoint
1407
- checkpoint_path = None
1408
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1409
- print("Loaded checkpoint from the previous attempt")
1410
- checkpoint_path = current.checkpoint.directory
1235
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1236
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1237
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1411
1238
 
1412
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1413
- for i in range(self.epochs):
1414
- ...
1415
- ```
1239
+ Note that all the values specified in parameters are added together so if you specify
1240
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1416
1241
 
1417
1242
 
1418
1243
  Parameters
1419
1244
  ----------
1420
- load_policy : str, default: "fresh"
1421
- The policy for loading the checkpoint. The following policies are supported:
1422
- - "eager": Loads the the latest available checkpoint within the namespace.
1423
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1424
- will be loaded at the start of the task.
1425
- - "none": Do not load any checkpoint
1426
- - "fresh": Loads the lastest checkpoint created within the running Task.
1427
- This mode helps load checkpoints across the various retry attempts of the same task.
1428
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1429
- created within the task will be loaded when the task retries execution after a failure.
1430
-
1431
- temp_dir_root : str, default: None
1432
- The root directory under which `current.checkpoint.directory` will be created.
1245
+ seconds : int, default 0
1246
+ Number of seconds to wait prior to timing out.
1247
+ minutes : int, default 0
1248
+ Number of minutes to wait prior to timing out.
1249
+ hours : int, default 0
1250
+ Number of hours to wait prior to timing out.
1433
1251
  """
1434
1252
  ...
1435
1253
 
1436
1254
  @typing.overload
1437
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1438
- ...
1439
-
1440
- @typing.overload
1441
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1255
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1442
1256
  ...
1443
1257
 
1444
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
1445
- """
1446
- Enables checkpointing for a step.
1447
-
1448
- > Examples
1449
-
1450
- - Saving Checkpoints
1451
-
1452
- ```python
1453
- @checkpoint
1454
- @step
1455
- def train(self):
1456
- model = create_model(self.parameters, checkpoint_path = None)
1457
- for i in range(self.epochs):
1458
- # some training logic
1459
- loss = model.train(self.dataset)
1460
- if i % 10 == 0:
1461
- model.save(
1462
- current.checkpoint.directory,
1463
- )
1464
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
1465
- # and returns a reference dictionary to the checkpoint saved in the datastore
1466
- self.latest_checkpoint = current.checkpoint.save(
1467
- name="epoch_checkpoint",
1468
- metadata={
1469
- "epoch": i,
1470
- "loss": loss,
1471
- }
1472
- )
1473
- ```
1258
+ @typing.overload
1259
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1260
+ ...
1261
+
1262
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1263
+ """
1264
+ Specifies a timeout for your step.
1474
1265
 
1475
- - Using Loaded Checkpoints
1266
+ This decorator is useful if this step may hang indefinitely.
1476
1267
 
1477
- ```python
1478
- @retry(times=3)
1479
- @checkpoint
1480
- @step
1481
- def train(self):
1482
- # Assume that the task has restarted and the previous attempt of the task
1483
- # saved a checkpoint
1484
- checkpoint_path = None
1485
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1486
- print("Loaded checkpoint from the previous attempt")
1487
- checkpoint_path = current.checkpoint.directory
1268
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1269
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1270
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1488
1271
 
1489
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1490
- for i in range(self.epochs):
1491
- ...
1492
- ```
1272
+ Note that all the values specified in parameters are added together, so if you specify
1273
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1493
1274
 
1494
1275
 
1495
1276
  Parameters
1496
1277
  ----------
1497
- load_policy : str, default: "fresh"
1498
- The policy for loading the checkpoint. The following policies are supported:
1499
- - "eager": Loads the the latest available checkpoint within the namespace.
1500
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1501
- will be loaded at the start of the task.
1502
- - "none": Do not load any checkpoint
1503
- - "fresh": Loads the lastest checkpoint created within the running Task.
1504
- This mode helps loading checkpoints across various retry attempts of the same task.
1505
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1506
- created within the task will be loaded when the task is retries execution on failure.
1507
-
1508
- temp_dir_root : str, default: None
1509
- The root directory under which `current.checkpoint.directory` will be created.
1278
+ seconds : int, default 0
1279
+ Number of seconds to wait prior to timing out.
1280
+ minutes : int, default 0
1281
+ Number of minutes to wait prior to timing out.
1282
+ hours : int, default 0
1283
+ Number of hours to wait prior to timing out.
1510
1284
  """
1511
1285
  ...
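A minimal sketch (not part of the package diff) of how the `@timeout` stub above is typically applied; the flow and step names and the 10-minute budget are illustrative.

```python
# Illustrative use of @timeout; seconds/minutes/hours are added together.
from metaflow import FlowSpec, step, timeout


class TimeoutDemoFlow(FlowSpec):

    @timeout(minutes=10)
    @step
    def start(self):
        # Work that could hang (e.g. a flaky network call) goes here;
        # exceeding the budget raises an exception in this task, which
        # @retry/@catch can then handle if present.
        self.result = sum(range(1_000_000))
        self.next(self.end)

    @step
    def end(self):
        print("result:", self.result)


if __name__ == "__main__":
    TimeoutDemoFlow()
```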
1512
1286
 
1513
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1287
+ @typing.overload
1288
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1514
1289
  """
1515
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1516
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1290
+ Specifies that the step will succeed under all circumstances.
1291
+
1292
+ The decorator will create an optional artifact, specified by `var`, which
1293
+ contains the exception raised. You can use it to detect the presence
1294
+ of errors, indicating that all happy-path artifacts produced by the step
1295
+ are missing.
1517
1296
 
1518
1297
 
1519
1298
  Parameters
1520
1299
  ----------
1521
- timeout : int
1522
- Time, in seconds before the task times out and fails. (Default: 3600)
1523
- poke_interval : int
1524
- Time in seconds that the job should wait in between each try. (Default: 60)
1525
- mode : str
1526
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1527
- exponential_backoff : bool
1528
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1529
- pool : str
1530
- the slot pool this task should run in,
1531
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1532
- soft_fail : bool
1533
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1534
- name : str
1535
- Name of the sensor on Airflow
1536
- description : str
1537
- Description of sensor in the Airflow UI
1538
- external_dag_id : str
1539
- The dag_id that contains the task you want to wait for.
1540
- external_task_ids : List[str]
1541
- The list of task_ids that you want to wait for.
1542
- If None (default value) the sensor waits for the DAG. (Default: None)
1543
- allowed_states : List[str]
1544
- Iterable of allowed states, (Default: ['success'])
1545
- failed_states : List[str]
1546
- Iterable of failed or dis-allowed states. (Default: None)
1547
- execution_delta : datetime.timedelta
1548
- time difference with the previous execution to look at,
1549
- the default is the same logical date as the current task or DAG. (Default: None)
1550
- check_existence: bool
1551
- Set to True to check if the external task exists or check if
1552
- the DAG to wait for exists. (Default: True)
1300
+ var : str, optional, default None
1301
+ Name of the artifact in which to store the caught exception.
1302
+ If not specified, the exception is not stored.
1303
+ print_exception : bool, default True
1304
+ Determines whether or not the exception is printed to
1305
+ stdout when caught.
1553
1306
  """
1554
1307
  ...
1555
1308
 
1556
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1309
+ @typing.overload
1310
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1311
+ ...
1312
+
1313
+ @typing.overload
1314
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1315
+ ...
1316
+
1317
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1557
1318
  """
1558
- Specifies what flows belong to the same project.
1319
+ Specifies that the step will succeed under all circumstances.
1559
1320
 
1560
- A project-specific namespace is created for all flows that
1561
- use the same `@project(name)`.
1321
+ The decorator will create an optional artifact, specified by `var`, which
1322
+ contains the exception raised. You can use it to detect the presence
1323
+ of errors, indicating that all happy-path artifacts produced by the step
1324
+ are missing.
1562
1325
 
1563
1326
 
1564
1327
  Parameters
1565
1328
  ----------
1566
- name : str
1567
- Project name. Make sure that the name is unique amongst all
1568
- projects that use the same production scheduler. The name may
1569
- contain only lowercase alphanumeric characters and underscores.
1570
-
1571
- branch : Optional[str], default None
1572
- The branch to use. If not specified, the branch is set to
1573
- `user.<username>` unless `production` is set to `True`. This can
1574
- also be set on the command line using `--branch` as a top-level option.
1575
- It is an error to specify `branch` in the decorator and on the command line.
1576
-
1577
- production : bool, default False
1578
- Whether or not the branch is the production branch. This can also be set on the
1579
- command line using `--production` as a top-level option. It is an error to specify
1580
- `production` in the decorator and on the command line.
1581
- The project branch name will be:
1582
- - if `branch` is specified:
1583
- - if `production` is True: `prod.<branch>`
1584
- - if `production` is False: `test.<branch>`
1585
- - if `branch` is not specified:
1586
- - if `production` is True: `prod`
1587
- - if `production` is False: `user.<username>`
1329
+ var : str, optional, default None
1330
+ Name of the artifact in which to store the caught exception.
1331
+ If not specified, the exception is not stored.
1332
+ print_exception : bool, default True
1333
+ Determines whether or not the exception is printed to
1334
+ stdout when caught.
1588
1335
  """
1589
1336
  ...
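A minimal sketch (not part of the package diff) showing the `@catch` stub above in use; the artifact name `compute_failed` and the simulated failure are illustrative.

```python
# Illustrative use of @catch: the exception is stored in an artifact and the
# flow continues instead of failing.
from metaflow import FlowSpec, step, catch


class CatchDemoFlow(FlowSpec):

    @catch(var="compute_failed", print_exception=True)
    @step
    def start(self):
        self.value = 1 / 0  # raises ZeroDivisionError; caught by @catch
        self.next(self.end)

    @step
    def end(self):
        # When start failed, its happy-path artifacts (self.value) are missing,
        # so check the catch artifact before using them.
        if getattr(self, "compute_failed", None):
            print("start failed with:", self.compute_failed)
        else:
            print("value:", self.value)


if __name__ == "__main__":
    CatchDemoFlow()
```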
1590
1337
 
1591
1338
  @typing.overload
1592
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1339
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1593
1340
  """
1594
- Specifies the times when the flow should be run when running on a
1595
- production scheduler.
1341
+ Internal decorator to support Fast bakery
1342
+ """
1343
+ ...
1344
+
1345
+ @typing.overload
1346
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1347
+ ...
1348
+
1349
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1350
+ """
1351
+ Internal decorator to support Fast bakery
1352
+ """
1353
+ ...
1354
+
1355
+ @typing.overload
1356
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1357
+ """
1358
+ Specifies environment variables to be set prior to the execution of a step.
1596
1359
 
1597
1360
 
1598
1361
  Parameters
1599
1362
  ----------
1600
- hourly : bool, default False
1601
- Run the workflow hourly.
1602
- daily : bool, default True
1603
- Run the workflow daily.
1604
- weekly : bool, default False
1605
- Run the workflow weekly.
1606
- cron : str, optional, default None
1607
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1608
- specified by this expression.
1609
- timezone : str, optional, default None
1610
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1611
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1363
+ vars : Dict[str, str], default {}
1364
+ Dictionary of environment variables to set.
1612
1365
  """
1613
1366
  ...
1614
1367
 
1615
1368
  @typing.overload
1616
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1369
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1617
1370
  ...
1618
1371
 
1619
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1372
+ @typing.overload
1373
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1374
+ ...
1375
+
1376
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1620
1377
  """
1621
- Specifies the times when the flow should be run when running on a
1622
- production scheduler.
1378
+ Specifies environment variables to be set prior to the execution of a step.
1623
1379
 
1624
1380
 
1625
1381
  Parameters
1626
1382
  ----------
1627
- hourly : bool, default False
1628
- Run the workflow hourly.
1629
- daily : bool, default True
1630
- Run the workflow daily.
1631
- weekly : bool, default False
1632
- Run the workflow weekly.
1633
- cron : str, optional, default None
1634
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1635
- specified by this expression.
1636
- timezone : str, optional, default None
1637
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1638
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1383
+ vars : Dict[str, str], default {}
1384
+ Dictionary of environment variables to set.
1639
1385
  """
1640
1386
  ...
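A minimal sketch (not part of the package diff) of the `@environment` stub above; the variable name and value are illustrative.

```python
# Illustrative use of @environment: the variable is set before the step's
# task process runs, including on remote compute backends.
import os

from metaflow import FlowSpec, environment, step


class EnvDemoFlow(FlowSpec):

    @environment(vars={"MODEL_STAGE": "staging"})
    @step
    def start(self):
        self.stage = os.environ["MODEL_STAGE"]
        self.next(self.end)

    @step
    def end(self):
        print("stage:", self.stage)


if __name__ == "__main__":
    EnvDemoFlow()
```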
1641
1387
 
1642
1388
  @typing.overload
1643
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1389
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1644
1390
  """
1645
- Specifies the PyPI packages for all steps of the flow.
1391
+ Specifies the PyPI packages for the step.
1646
1392
 
1647
- Use `@pypi_base` to set common packages required by all
1393
+ Information in this decorator will augment any
1394
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
1395
+ you can use `@pypi_base` to set packages required by all
1648
1396
  steps and use `@pypi` to specify step-specific overrides.
1649
1397
 
1398
+
1650
1399
  Parameters
1651
1400
  ----------
1652
1401
  packages : Dict[str, str], default: {}
1653
- Packages to use for this flow. The key is the name of the package
1402
+ Packages to use for this step. The key is the name of the package
1654
1403
  and the value is the version to use.
1655
1404
  python : str, optional, default: None
1656
1405
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
@@ -1659,20 +1408,27 @@ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[s
1659
1408
  ...
1660
1409
 
1661
1410
  @typing.overload
1662
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1411
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1663
1412
  ...
1664
1413
 
1665
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1414
+ @typing.overload
1415
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1416
+ ...
1417
+
1418
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1666
1419
  """
1667
- Specifies the PyPI packages for all steps of the flow.
1420
+ Specifies the PyPI packages for the step.
1668
1421
 
1669
- Use `@pypi_base` to set common packages required by all
1422
+ Information in this decorator will augment any
1423
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
1424
+ you can use `@pypi_base` to set packages required by all
1670
1425
  steps and use `@pypi` to specify step-specific overrides.
1671
1426
 
1427
+
1672
1428
  Parameters
1673
1429
  ----------
1674
1430
  packages : Dict[str, str], default: {}
1675
- Packages to use for this flow. The key is the name of the package
1431
+ Packages to use for this step. The key is the name of the package
1676
1432
  and the value is the version to use.
1677
1433
  python : str, optional, default: None
1678
1434
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
@@ -1681,95 +1437,76 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1681
1437
  ...
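A minimal sketch (not part of the package diff) of a step-specific dependency via the `@pypi` stub above; the package and version pin are illustrative, and activating the isolated environment is assumed to require running with `--environment=pypi`.

```python
# Illustrative use of @pypi: only the `fetch` step gets the extra package.
from metaflow import FlowSpec, pypi, step


class PypiStepFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.fetch)

    @pypi(packages={"requests": "2.31.0"})  # pin is illustrative
    @step
    def fetch(self):
        import requests  # resolved inside the step-specific environment

        self.requests_version = requests.__version__
        self.next(self.end)

    @step
    def end(self):
        print("requests:", self.requests_version)


if __name__ == "__main__":
    PypiStepFlow()
```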
1682
1438
 
1683
1439
  @typing.overload
1684
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1440
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1685
1441
  """
1686
- Specifies the event(s) that this flow depends on.
1687
-
1688
- ```
1689
- @trigger(event='foo')
1690
- ```
1691
- or
1692
- ```
1693
- @trigger(events=['foo', 'bar'])
1694
- ```
1695
-
1696
- Additionally, you can specify the parameter mappings
1697
- to map event payload to Metaflow parameters for the flow.
1698
- ```
1699
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1700
- ```
1701
- or
1702
- ```
1703
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1704
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1705
- ```
1442
+ Specifies the number of times the task corresponding
1443
+ to a step needs to be retried.
1706
1444
 
1707
- 'parameters' can also be a list of strings and tuples like so:
1708
- ```
1709
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1710
- ```
1711
- This is equivalent to:
1712
- ```
1713
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1714
- ```
1445
+ This decorator is useful for handling transient errors, such as networking issues.
1446
+ If your task contains operations that can't be retried safely, e.g. database updates,
1447
+ it is advisable to annotate it with `@retry(times=0)`.
1448
+
1449
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1450
+ decorator will execute a no-op task after all retries have been exhausted,
1451
+ ensuring that the flow execution can continue.
1715
1452
 
1716
1453
 
1717
1454
  Parameters
1718
1455
  ----------
1719
- event : Union[str, Dict[str, Any]], optional, default None
1720
- Event dependency for this flow.
1721
- events : List[Union[str, Dict[str, Any]]], default []
1722
- Events dependency for this flow.
1723
- options : Dict[str, Any], default {}
1724
- Backend-specific configuration for tuning eventing behavior.
1456
+ times : int, default 3
1457
+ Number of times to retry this task.
1458
+ minutes_between_retries : int, default 2
1459
+ Number of minutes between retries.
1725
1460
  """
1726
1461
  ...
1727
1462
 
1728
1463
  @typing.overload
1729
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1464
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1730
1465
  ...
1731
1466
 
1732
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1467
+ @typing.overload
1468
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1469
+ ...
1470
+
1471
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1733
1472
  """
1734
- Specifies the event(s) that this flow depends on.
1735
-
1736
- ```
1737
- @trigger(event='foo')
1738
- ```
1739
- or
1740
- ```
1741
- @trigger(events=['foo', 'bar'])
1742
- ```
1473
+ Specifies the number of times the task corresponding
1474
+ to a step needs to be retried.
1743
1475
 
1744
- Additionally, you can specify the parameter mappings
1745
- to map event payload to Metaflow parameters for the flow.
1746
- ```
1747
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1748
- ```
1749
- or
1750
- ```
1751
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1752
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1753
- ```
1476
+ This decorator is useful for handling transient errors, such as networking issues.
1477
+ If your task contains operations that can't be retried safely, e.g. database updates,
1478
+ it is advisable to annotate it with `@retry(times=0)`.
1754
1479
 
1755
- 'parameters' can also be a list of strings and tuples like so:
1756
- ```
1757
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1758
- ```
1759
- This is equivalent to:
1760
- ```
1761
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1762
- ```
1480
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1481
+ decorator will execute a no-op task after all retries have been exhausted,
1482
+ ensuring that the flow execution can continue.
1763
1483
 
1764
1484
 
1765
1485
  Parameters
1766
1486
  ----------
1767
- event : Union[str, Dict[str, Any]], optional, default None
1768
- Event dependency for this flow.
1769
- events : List[Union[str, Dict[str, Any]]], default []
1770
- Events dependency for this flow.
1771
- options : Dict[str, Any], default {}
1772
- Backend-specific configuration for tuning eventing behavior.
1487
+ times : int, default 3
1488
+ Number of times to retry this task.
1489
+ minutes_between_retries : int, default 2
1490
+ Number of minutes between retries.
1491
+ """
1492
+ ...
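A minimal sketch (not part of the package diff) of the `@retry` stub above; the forced first-attempt failure via `current.retry_count` is illustrative.

```python
# Illustrative use of @retry for transient errors: the first attempt fails on
# purpose and a later attempt succeeds.
from metaflow import FlowSpec, current, retry, step


class RetryDemoFlow(FlowSpec):

    @retry(times=3, minutes_between_retries=0)
    @step
    def start(self):
        if current.retry_count == 0:
            raise RuntimeError("transient failure on the first attempt")
        self.attempts = current.retry_count + 1
        self.next(self.end)

    @step
    def end(self):
        print("succeeded after", self.attempts, "attempts")


if __name__ == "__main__":
    RetryDemoFlow()
```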
1493
+
1494
+ @typing.overload
1495
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1496
+ """
1497
+ Decorator prototype for all step decorators. This function gets specialized
1498
+ and imported for all decorator types by _import_plugin_decorators().
1499
+ """
1500
+ ...
1501
+
1502
+ @typing.overload
1503
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1504
+ ...
1505
+
1506
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1507
+ """
1508
+ Decorator prototype for all step decorators. This function gets specialized
1509
+ and imported for all decorator types by _import_plugin_decorators().
1773
1510
  """
1774
1511
  ...
1775
1512
 
@@ -1888,53 +1625,189 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1888
1625
  ...
1889
1626
 
1890
1627
  @typing.overload
1891
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1628
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1892
1629
  """
1893
- Specifies the Conda environment for all steps of the flow.
1630
+ Specifies the flow(s) that this flow depends on.
1894
1631
 
1895
- Use `@conda_base` to set common libraries required by all
1896
- steps and use `@conda` to specify step-specific additions.
1632
+ ```
1633
+ @trigger_on_finish(flow='FooFlow')
1634
+ ```
1635
+ or
1636
+ ```
1637
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1638
+ ```
1639
+ This decorator respects the @project decorator and triggers the flow
1640
+ when upstream runs within the same namespace complete successfully.
1641
+
1642
+ Additionally, you can specify project-aware upstream flow dependencies
1643
+ by specifying the fully qualified project_flow_name.
1644
+ ```
1645
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1646
+ ```
1647
+ or
1648
+ ```
1649
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1650
+ ```
1651
+
1652
+ You can also specify just the project or project branch (other values will be
1653
+ inferred from the current project or project branch):
1654
+ ```
1655
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1656
+ ```
1657
+
1658
+ Note that `branch` is typically one of:
1659
+ - `prod`
1660
+ - `user.bob`
1661
+ - `test.my_experiment`
1662
+ - `prod.staging`
1897
1663
 
1898
1664
 
1899
1665
  Parameters
1900
1666
  ----------
1901
- packages : Dict[str, str], default {}
1902
- Packages to use for this flow. The key is the name of the package
1903
- and the value is the version to use.
1904
- libraries : Dict[str, str], default {}
1905
- Supported for backward compatibility. When used with packages, packages will take precedence.
1906
- python : str, optional, default None
1907
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1908
- that the version used will correspond to the version of the Python interpreter used to start the run.
1909
- disabled : bool, default False
1910
- If set to True, disables Conda.
1667
+ flow : Union[str, Dict[str, str]], optional, default None
1668
+ Upstream flow dependency for this flow.
1669
+ flows : List[Union[str, Dict[str, str]]], default []
1670
+ Upstream flow dependencies for this flow.
1671
+ options : Dict[str, Any], default {}
1672
+ Backend-specific configuration for tuning eventing behavior.
1911
1673
  """
1912
1674
  ...
1913
1675
 
1914
1676
  @typing.overload
1915
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1677
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1916
1678
  ...
1917
1679
 
1918
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1680
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1919
1681
  """
1920
- Specifies the Conda environment for all steps of the flow.
1682
+ Specifies the flow(s) that this flow depends on.
1921
1683
 
1922
- Use `@conda_base` to set common libraries required by all
1923
- steps and use `@conda` to specify step-specific additions.
1684
+ ```
1685
+ @trigger_on_finish(flow='FooFlow')
1686
+ ```
1687
+ or
1688
+ ```
1689
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1690
+ ```
1691
+ This decorator respects the @project decorator and triggers the flow
1692
+ when upstream runs within the same namespace complete successfully.
1693
+
1694
+ Additionally, you can specify project-aware upstream flow dependencies
1695
+ by specifying the fully qualified project_flow_name.
1696
+ ```
1697
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1698
+ ```
1699
+ or
1700
+ ```
1701
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1702
+ ```
1703
+
1704
+ You can also specify just the project or project branch (other values will be
1705
+ inferred from the current project or project branch):
1706
+ ```
1707
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1708
+ ```
1709
+
1710
+ Note that `branch` is typically one of:
1711
+ - `prod`
1712
+ - `user.bob`
1713
+ - `test.my_experiment`
1714
+ - `prod.staging`
1715
+
1716
+
1717
+ Parameters
1718
+ ----------
1719
+ flow : Union[str, Dict[str, str]], optional, default None
1720
+ Upstream flow dependency for this flow.
1721
+ flows : List[Union[str, Dict[str, str]]], default []
1722
+ Upstream flow dependencies for this flow.
1723
+ options : Dict[str, Any], default {}
1724
+ Backend-specific configuration for tuning eventing behavior.
1725
+ """
1726
+ ...
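A minimal sketch (not part of the package diff) of the `@trigger_on_finish` stub above; the flow names are illustrative, and the trigger is assumed to take effect only once the flow is deployed to a production orchestrator (e.g. via `argo-workflows create`).

```python
# Illustrative use of @trigger_on_finish: run this flow when an upstream flow
# in the same namespace completes successfully.
from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow="TrainingFlow")  # upstream flow name is illustrative
class PublishFlow(FlowSpec):

    @step
    def start(self):
        print("TrainingFlow finished; publishing artifacts")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PublishFlow()
```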
1727
+
1728
+ @typing.overload
1729
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1730
+ """
1731
+ Specifies the times when the flow should be run when running on a
1732
+ production scheduler.
1733
+
1734
+
1735
+ Parameters
1736
+ ----------
1737
+ hourly : bool, default False
1738
+ Run the workflow hourly.
1739
+ daily : bool, default True
1740
+ Run the workflow daily.
1741
+ weekly : bool, default False
1742
+ Run the workflow weekly.
1743
+ cron : str, optional, default None
1744
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1745
+ specified by this expression.
1746
+ timezone : str, optional, default None
1747
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1748
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1749
+ """
1750
+ ...
1751
+
1752
+ @typing.overload
1753
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1754
+ ...
1755
+
1756
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1757
+ """
1758
+ Specifies the times when the flow should be run when running on a
1759
+ production scheduler.
1760
+
1761
+
1762
+ Parameters
1763
+ ----------
1764
+ hourly : bool, default False
1765
+ Run the workflow hourly.
1766
+ daily : bool, default True
1767
+ Run the workflow daily.
1768
+ weekly : bool, default False
1769
+ Run the workflow weekly.
1770
+ cron : str, optional, default None
1771
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1772
+ specified by this expression.
1773
+ timezone : str, optional, default None
1774
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1775
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1776
+ """
1777
+ ...
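A minimal sketch (not part of the package diff) of the `@schedule` stub above; the schedule only takes effect when the flow is deployed to a production scheduler, and the flow name is illustrative.

```python
# Illustrative use of @schedule: run the flow daily on the production
# scheduler (a cron expression could be passed via `cron=...` instead).
from metaflow import FlowSpec, schedule, step


@schedule(daily=True)
class NightlyReportFlow(FlowSpec):

    @step
    def start(self):
        self.report = "generated"
        self.next(self.end)

    @step
    def end(self):
        print(self.report)


if __name__ == "__main__":
    NightlyReportFlow()
```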
1778
+
1779
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1780
+ """
1781
+ Specifies what flows belong to the same project.
1782
+
1783
+ A project-specific namespace is created for all flows that
1784
+ use the same `@project(name)`.
1924
1785
 
1925
1786
 
1926
1787
  Parameters
1927
1788
  ----------
1928
- packages : Dict[str, str], default {}
1929
- Packages to use for this flow. The key is the name of the package
1930
- and the value is the version to use.
1931
- libraries : Dict[str, str], default {}
1932
- Supported for backward compatibility. When used with packages, packages will take precedence.
1933
- python : str, optional, default None
1934
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1935
- that the version used will correspond to the version of the Python interpreter used to start the run.
1936
- disabled : bool, default False
1937
- If set to True, disables Conda.
1789
+ name : str
1790
+ Project name. Make sure that the name is unique amongst all
1791
+ projects that use the same production scheduler. The name may
1792
+ contain only lowercase alphanumeric characters and underscores.
1793
+
1794
+ branch : Optional[str], default None
1795
+ The branch to use. If not specified, the branch is set to
1796
+ `user.<username>` unless `production` is set to `True`. This can
1797
+ also be set on the command line using `--branch` as a top-level option.
1798
+ It is an error to specify `branch` in the decorator and on the command line.
1799
+
1800
+ production : bool, default False
1801
+ Whether or not the branch is the production branch. This can also be set on the
1802
+ command line using `--production` as a top-level option. It is an error to specify
1803
+ `production` in the decorator and on the command line.
1804
+ The project branch name will be:
1805
+ - if `branch` is specified:
1806
+ - if `production` is True: `prod.<branch>`
1807
+ - if `production` is False: `test.<branch>`
1808
+ - if `branch` is not specified:
1809
+ - if `production` is True: `prod`
1810
+ - if `production` is False: `user.<username>`
1938
1811
  """
1939
1812
  ...
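A minimal sketch (not part of the package diff) of the `@project` stub above; the project and flow names are illustrative.

```python
# Illustrative use of @project: flows sharing the same project name get
# project- and branch-aware namespaces when deployed (e.g. prod vs user.<name>).
from metaflow import FlowSpec, project, step


@project(name="fraud_detection")  # lowercase alphanumerics and underscores only
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScoringFlow()
```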
1940
1813
 
@@ -1982,105 +1855,232 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1982
1855
  ...
1983
1856
 
1984
1857
  @typing.overload
1985
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1858
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1986
1859
  """
1987
- Specifies the flow(s) that this flow depends on.
1860
+ Specifies the event(s) that this flow depends on.
1988
1861
 
1989
1862
  ```
1990
- @trigger_on_finish(flow='FooFlow')
1863
+ @trigger(event='foo')
1991
1864
  ```
1992
1865
  or
1993
1866
  ```
1994
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1867
+ @trigger(events=['foo', 'bar'])
1995
1868
  ```
1996
- This decorator respects the @project decorator and triggers the flow
1997
- when upstream runs within the same namespace complete successfully
1998
1869
 
1999
- Additionally, you can specify project aware upstream flow dependencies
2000
- by specifying the fully qualified project_flow_name.
1870
+ Additionally, you can specify the parameter mappings
1871
+ to map event payload to Metaflow parameters for the flow.
2001
1872
  ```
2002
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1873
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
2003
1874
  ```
2004
1875
  or
2005
1876
  ```
2006
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1877
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1878
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
2007
1879
  ```
2008
1880
 
2009
- You can also specify just the project or project branch (other values will be
2010
- inferred from the current project or project branch):
1881
+ 'parameters' can also be a list of strings and tuples like so:
2011
1882
  ```
2012
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1883
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1884
+ ```
1885
+ This is equivalent to:
1886
+ ```
1887
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
2013
1888
  ```
2014
-
2015
- Note that `branch` is typically one of:
2016
- - `prod`
2017
- - `user.bob`
2018
- - `test.my_experiment`
2019
- - `prod.staging`
2020
1889
 
2021
1890
 
2022
1891
  Parameters
2023
1892
  ----------
2024
- flow : Union[str, Dict[str, str]], optional, default None
2025
- Upstream flow dependency for this flow.
2026
- flows : List[Union[str, Dict[str, str]]], default []
2027
- Upstream flow dependencies for this flow.
1893
+ event : Union[str, Dict[str, Any]], optional, default None
1894
+ Event dependency for this flow.
1895
+ events : List[Union[str, Dict[str, Any]]], default []
1896
+ Events dependency for this flow.
2028
1897
  options : Dict[str, Any], default {}
2029
1898
  Backend-specific configuration for tuning eventing behavior.
2030
1899
  """
2031
1900
  ...
2032
1901
 
2033
1902
  @typing.overload
2034
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1903
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
2035
1904
  ...
2036
1905
 
2037
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1906
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
2038
1907
  """
2039
- Specifies the flow(s) that this flow depends on.
1908
+ Specifies the event(s) that this flow depends on.
2040
1909
 
2041
1910
  ```
2042
- @trigger_on_finish(flow='FooFlow')
1911
+ @trigger(event='foo')
2043
1912
  ```
2044
1913
  or
2045
1914
  ```
2046
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1915
+ @trigger(events=['foo', 'bar'])
2047
1916
  ```
2048
- This decorator respects the @project decorator and triggers the flow
2049
- when upstream runs within the same namespace complete successfully
2050
1917
 
2051
- Additionally, you can specify project aware upstream flow dependencies
2052
- by specifying the fully qualified project_flow_name.
1918
+ Additionally, you can specify the parameter mappings
1919
+ to map event payload to Metaflow parameters for the flow.
2053
1920
  ```
2054
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1921
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
2055
1922
  ```
2056
1923
  or
2057
1924
  ```
2058
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1925
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1926
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
2059
1927
  ```
2060
1928
 
2061
- You can also specify just the project or project branch (other values will be
2062
- inferred from the current project or project branch):
1929
+ 'parameters' can also be a list of strings and tuples like so:
2063
1930
  ```
2064
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1931
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1932
+ ```
1933
+ This is equivalent to:
1934
+ ```
1935
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
2065
1936
  ```
2066
-
2067
- Note that `branch` is typically one of:
2068
- - `prod`
2069
- - `user.bob`
2070
- - `test.my_experiment`
2071
- - `prod.staging`
2072
1937
 
2073
1938
 
2074
1939
  Parameters
2075
1940
  ----------
2076
- flow : Union[str, Dict[str, str]], optional, default None
2077
- Upstream flow dependency for this flow.
2078
- flows : List[Union[str, Dict[str, str]]], default []
2079
- Upstream flow dependencies for this flow.
1941
+ event : Union[str, Dict[str, Any]], optional, default None
1942
+ Event dependency for this flow.
1943
+ events : List[Union[str, Dict[str, Any]]], default []
1944
+ Events dependency for this flow.
2080
1945
  options : Dict[str, Any], default {}
2081
1946
  Backend-specific configuration for tuning eventing behavior.
2082
1947
  """
2083
1948
  ...
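A minimal sketch (not part of the package diff) of the `@trigger` stub above; the event name, payload field, and parameter are illustrative.

```python
# Illustrative use of @trigger: start the flow from an external event and map
# the event payload field `table_name` onto the flow parameter `table`.
from metaflow import FlowSpec, Parameter, step, trigger


@trigger(event={"name": "data_updated", "parameters": {"table": "table_name"}})
class RefreshFlow(FlowSpec):

    table = Parameter("table", default="raw_events")

    @step
    def start(self):
        print("refreshing", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    RefreshFlow()
```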
2084
1949
 
1950
+ @typing.overload
1951
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1952
+ """
1953
+ Specifies the Conda environment for all steps of the flow.
1954
+
1955
+ Use `@conda_base` to set common libraries required by all
1956
+ steps and use `@conda` to specify step-specific additions.
1957
+
1958
+
1959
+ Parameters
1960
+ ----------
1961
+ packages : Dict[str, str], default {}
1962
+ Packages to use for this flow. The key is the name of the package
1963
+ and the value is the version to use.
1964
+ libraries : Dict[str, str], default {}
1965
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1966
+ python : str, optional, default None
1967
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1968
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1969
+ disabled : bool, default False
1970
+ If set to True, disables Conda.
1971
+ """
1972
+ ...
1973
+
1974
+ @typing.overload
1975
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1976
+ ...
1977
+
1978
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1979
+ """
1980
+ Specifies the Conda environment for all steps of the flow.
1981
+
1982
+ Use `@conda_base` to set common libraries required by all
1983
+ steps and use `@conda` to specify step-specific additions.
1984
+
1985
+
1986
+ Parameters
1987
+ ----------
1988
+ packages : Dict[str, str], default {}
1989
+ Packages to use for this flow. The key is the name of the package
1990
+ and the value is the version to use.
1991
+ libraries : Dict[str, str], default {}
1992
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1993
+ python : str, optional, default None
1994
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1995
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1996
+ disabled : bool, default False
1997
+ If set to True, disables Conda.
1998
+ """
1999
+ ...
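A minimal sketch (not part of the package diff) of the `@conda_base` stub above; the package pin and Python version are illustrative, and the isolated environment is assumed to be activated by running with `--environment=conda`.

```python
# Illustrative use of @conda_base: one Conda environment shared by every step.
from metaflow import FlowSpec, conda_base, step


@conda_base(packages={"numpy": "1.26.4"}, python="3.10.14")  # pins illustrative
class CondaFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # resolved from the flow-level Conda environment

        self.mean = float(np.arange(10).mean())
        self.next(self.end)

    @step
    def end(self):
        print("mean:", self.mean)


if __name__ == "__main__":
    CondaFlow()
```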
2000
+
2001
+ @typing.overload
2002
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
2003
+ """
2004
+ Specifies the PyPI packages for all steps of the flow.
2005
+
2006
+ Use `@pypi_base` to set common packages required by all
2007
+ steps and use `@pypi` to specify step-specific overrides.
2008
+
2009
+ Parameters
2010
+ ----------
2011
+ packages : Dict[str, str], default: {}
2012
+ Packages to use for this flow. The key is the name of the package
2013
+ and the value is the version to use.
2014
+ python : str, optional, default: None
2015
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
2016
+ that the version used will correspond to the version of the Python interpreter used to start the run.
2017
+ """
2018
+ ...
2019
+
2020
+ @typing.overload
2021
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
2022
+ ...
2023
+
2024
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
2025
+ """
2026
+ Specifies the PyPI packages for all steps of the flow.
2027
+
2028
+ Use `@pypi_base` to set common packages required by all
2029
+ steps and use `@pypi` to specify step-specific overrides.
2030
+
2031
+ Parameters
2032
+ ----------
2033
+ packages : Dict[str, str], default: {}
2034
+ Packages to use for this flow. The key is the name of the package
2035
+ and the value is the version to use.
2036
+ python : str, optional, default: None
2037
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
2038
+ that the version used will correspond to the version of the Python interpreter used to start the run.
2039
+ """
2040
+ ...
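A minimal sketch (not part of the package diff) of the `@pypi_base` stub above; the package pin and Python version are illustrative, with step-level `@pypi` overrides possible where needed.

```python
# Illustrative use of @pypi_base: flow-wide PyPI packages for every step.
from metaflow import FlowSpec, pypi_base, step


@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.9")  # pins illustrative
class PandasFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the flow-level PyPI environment

        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print("rows:", self.rows)


if __name__ == "__main__":
    PandasFlow()
```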
2041
+
2042
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
2043
+ """
2044
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
2045
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
2046
+
2047
+
2048
+ Parameters
2049
+ ----------
2050
+ timeout : int
2051
+ Time, in seconds before the task times out and fails. (Default: 3600)
2052
+ poke_interval : int
2053
+ Time in seconds that the job should wait in between each try. (Default: 60)
2054
+ mode : str
2055
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
2056
+ exponential_backoff : bool
2057
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
2058
+ pool : str
2059
+ The slot pool this task should run in;
2060
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
2061
+ soft_fail : bool
2062
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
2063
+ name : str
2064
+ Name of the sensor on Airflow
2065
+ description : str
2066
+ Description of sensor in the Airflow UI
2067
+ external_dag_id : str
2068
+ The dag_id that contains the task you want to wait for.
2069
+ external_task_ids : List[str]
2070
+ The list of task_ids that you want to wait for.
2071
+ If None (default value) the sensor waits for the DAG. (Default: None)
2072
+ allowed_states : List[str]
2073
+ Iterable of allowed states, (Default: ['success'])
2074
+ failed_states : List[str]
2075
+ Iterable of failed or dis-allowed states. (Default: None)
2076
+ execution_delta : datetime.timedelta
2077
+ time difference with the previous execution to look at,
2078
+ the default is the same logical date as the current task or DAG. (Default: None)
2079
+ check_existence: bool
2080
+ Set to True to check if the external task exists or check if
2081
+ the DAG to wait for exists. (Default: True)
2082
+ """
2083
+ ...
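A minimal sketch (not part of the package diff) of the `@airflow_external_task_sensor` stub above; all identifiers are illustrative, the decorator only takes effect when the flow is compiled with `airflow create`, and arguments not shown are assumed to fall back to the defaults listed in the docstring.

```python
# Illustrative use of @airflow_external_task_sensor: gate the `start` step on
# an external Airflow DAG when this flow is compiled to an Airflow DAG.
from metaflow import FlowSpec, airflow_external_task_sensor, step


@airflow_external_task_sensor(
    name="wait_for_ingest",        # sensor name shown in Airflow (illustrative)
    external_dag_id="ingest_dag",  # upstream DAG to wait for (illustrative)
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```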
2084
+
2085
2085
  pkg_name: str
2086
2086